Coverage for python/lsst/meas/extensions/scarlet/scarletDeblendTask.py: 16% (405 statements)
coverage.py v7.3.2, created at 2023-10-12 10:45 +0000

# This file is part of meas_extensions_scarlet.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

from functools import partial
import logging

import numpy as np

import lsst.pex.config as pexConfig
import lsst.pipe.base as pipeBase
import lsst.geom as geom
import lsst.afw.geom.ellipses as afwEll
import lsst.afw.image as afwImage
import lsst.afw.detection as afwDet
import lsst.afw.table as afwTable
import lsst.scarlet.lite as scl
from lsst.utils.logging import PeriodicLogger
from lsst.utils.timer import timeMethod
from lsst.afw.image import IncompleteDataError

from . import io
from .utils import bboxToScarletBox, defaultBadPixelMasks, buildObservation

# Scarlet and proxmin have a different definition of log levels than the stack,
# so even "warnings" occur far more often than we would like.
# So for now we only display scarlet and proxmin errors, as all other
# scarlet outputs would be considered "TRACE" by our standards.
scarletLogger = logging.getLogger("scarlet")
scarletLogger.setLevel(logging.ERROR)
proxminLogger = logging.getLogger("proxmin")
proxminLogger.setLevel(logging.ERROR)

__all__ = ["deblend", "deblend_old_lite", "ScarletDeblendConfig", "ScarletDeblendTask"]

logger = logging.getLogger(__name__)


class ScarletGradientError(Exception):
    """An error occurred during optimization

    This error occurs when the optimizer encounters
    a NaN value while calculating the gradient.
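
    Examples
    --------
    A minimal doctest showing the stored message (the iteration count and
    source list below are arbitrary illustrative values):

    >>> str(ScarletGradientError(10, [1, 3]))
    'ScarletGradientError in iteration 10. NaN values introduced in sources [1, 3]'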
60 """
61 def __init__(self, iterations, sources):
62 self.iterations = iterations
63 self.sources = sources
64 msg = ("ScalarGradientError in iteration {0}. "
65 "NaN values introduced in sources {1}")
66 self.message = msg.format(iterations, sources)
68 def __str__(self):
69 return self.message


def _checkBlendConvergence(blend, f_rel):
    """Check whether or not a blend has converged
74 """
75 deltaLoss = np.abs(blend.loss[-2] - blend.loss[-1])
76 convergence = f_rel * np.abs(blend.loss[-1])
77 return deltaLoss < convergence


def isPseudoSource(source, pseudoColumns):
    """Check if a source is a pseudo source.

    This is mostly for skipping sky objects,
    but any other column can also be added to disable
    deblending on a parent or individual source when
    set to `True`.

    Parameters
    ----------
    source : `lsst.afw.table.source.source.SourceRecord`
        The source to check for the pseudo bit.
    pseudoColumns : `list` of `str`
        A list of columns to check for pseudo sources.
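
    Examples
    --------
    Only item access is used, so a plain `dict` can stand in for a source
    record in this illustrative doctest (a real call passes a `SourceRecord`
    or `PeakRecord`):

    >>> isPseudoSource({"sky_source": True}, ["sky_source", "merge_peak_sky"])
    True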
94 """
95 isPseudo = False
96 for col in pseudoColumns:
97 try:
98 isPseudo |= source[col]
99 except KeyError:
100 pass
101 return isPseudo


def computePsfKernelImage(mExposure, psfCenter):
    """Compute the PSF kernel image and update the multiband exposure
    if not all of the PSF images could be computed.

    Parameters
    ----------
    mExposure : `lsst.afw.image.multiband.MultibandExposure`
        The multiband exposure for which to compute the PSF kernel images.
    psfCenter : `tuple` or `Point2I` or `Point2D`
        The location `(x, y)` used as the center of the PSF.

    Returns
    -------
    psfModels : `np.ndarray`
        The multiband PSF image.
    mExposure : `MultibandExposure`
        The exposure, updated to only use bands that
        successfully generated a PSF image.
    """
    if not isinstance(psfCenter, geom.Point2D):
        psfCenter = geom.Point2D(*psfCenter)

    try:
        psfModels = mExposure.computePsfKernelImage(psfCenter)
    except IncompleteDataError as e:
        psfModels = e.partialPsf
        # Use only the bands that successfully generated a PSF image.
        bands = psfModels.filters
        mExposure = mExposure[bands,]
        if len(bands) == 1:
            # Only a single band generated a PSF, so the MultibandExposure
            # became a single band ExposureF.
            # Convert the result back into a MultibandExposure.
            mExposure = afwImage.MultibandExposure.fromExposures(bands, [mExposure])
    return psfModels.array, mExposure
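

# A hedged usage sketch for ``computePsfKernelImage`` (``mExposure`` is
# assumed to come from the calling pipeline and ``x``, ``y`` are placeholder
# coordinates):
#
#     psfModels, mExposure = computePsfKernelImage(mExposure, (x, y))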


def deblend(mExposure, modelPsf, footprint, config, spectrumInit, monotonicity, wavelets=None):
    """Deblend a parent footprint

    Parameters
    ----------
    mExposure : `lsst.afw.image.MultibandExposure`
        The multiband exposure containing the image,
        mask, and variance data.
    modelPsf : `numpy.ndarray`
        The 2D model of the PSF in the partially deconvolved space.
    footprint : `lsst.afw.detection.Footprint`
        The footprint of the parent to deblend.
    config : `ScarletDeblendConfig`
        Configuration of the deblending task.
    spectrumInit : `bool`
        Whether or not to initialize the model using the spectrum.
    monotonicity : `lsst.scarlet.lite.operators.Monotonicity`
        The monotonicity operator.
    wavelets : `numpy.ndarray`
        Pre-generated wavelets to use if using wavelet initialization.

    Returns
    -------
    blend : `lsst.scarlet.lite.Blend`
        The blend that was fit.
    skippedSources : `list[int]`
        Indices of sources that were skipped due to no flux.
        This usually means that a source was a spurious detection in one
        band that should not have been included in the merged catalog.
    skippedBands : `list[str]`
        Bands that were skipped because a PSF could not be generated for them.
168 """
169 # Extract coordinates from each MultiColorPeak
170 bbox = footprint.getBBox()
171 psfCenter = footprint.getCentroid()
173 observation = buildObservation(
174 modelPsf=modelPsf,
175 psfCenter=psfCenter,
176 mExposure=mExposure[:, bbox],
177 footprint=footprint,
178 badPixelMasks=config.badMask,
179 useWeights=config.useWeights,
180 convolutionType=config.convolutionType,
181 )
183 # Convert the peaks into an array
184 peaks = [
185 np.array([peak.getIy(), peak.getIx()], dtype=int)
186 for peak in footprint.peaks
187 if not isPseudoSource(peak, config.pseudoColumns)
188 ]
190 # Initialize the sources
191 if config.morphImage == "chi2":
192 sources = scl.initialization.FactorizedChi2Initialization(
193 observation=observation,
194 centers=peaks,
195 min_snr=config.minSNR,
196 monotonicity=monotonicity,
197 thresh=config.backgroundThresh,
198 ).sources
199 elif config.morphImage == "wavelet":
200 _bbox = bboxToScarletBox(len(mExposure.filters), bbox, bbox.getMin())
201 _wavelets = wavelets[(slice(None), *_bbox[1:].slices)]
203 sources = scl.initialization.FactorizedWaveletInitialization(
204 observation=observation,
205 centers=peaks,
206 use_psf=False,
207 wavelets=_wavelets,
208 monotonicity=monotonicity,
209 min_snr=config.minSNR,
210 thresh=config.backgroundThresh,
211 ).sources
212 else:
213 raise ValueError("morphImage must be either 'chi2' or 'wavelet'.")
215 blend = scl.Blend(sources, observation)
217 # Initialize each source with its best fit spectrum
218 if spectrumInit:
219 blend.fit_spectra()
221 # Set the optimizer
222 if config.optimizer == "adaprox":
223 blend.parameterize(partial(
224 scl.component.default_adaprox_parameterization,
225 noise_rms=observation.noise_rms/10,
226 ))
227 elif config.optimizer == "fista":
228 blend.parameterize(scl.component.default_fista_parameterization)
229 else:
230 raise ValueError("Unrecognized optimizer. Must be either 'adaprox' or 'fista'.")
232 blend.fit(
233 max_iter=config.maxIter,
234 e_rel=config.relativeError,
235 min_iter=config.minIter,
236 )
238 # Attach the peak to all of the initialized sources
239 for k, center in enumerate(peaks):
240 # This is just to make sure that there isn't a coding bug
241 if len(sources[k].components) > 0 and np.any(sources[k].center != center):
242 raise ValueError(f"Misaligned center, expected {center} but got {sources[k].center}")
243 # Store the record for the peak with the appropriate source
244 sources[k].detectedPeak = footprint.peaks[k]
246 # Set the sources that could not be initialized and were skipped
247 skippedSources = [src for src in sources if src.is_null]
249 # Store the location of the PSF center for storage
250 blend.psfCenter = (psfCenter.x, psfCenter.y)
252 # Calculate the bands that were skipped
253 skippedBands = [band for band in mExposure.filters if band not in observation.bands]
255 return blend, skippedSources, skippedBands
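

# A hedged sketch of a standalone call to ``deblend`` (the multiband exposure
# and parent footprint are assumed to come from the surrounding pipeline, as
# in ScarletDeblendTask.deblend below; sigma and the operator size mirror the
# task defaults):
#
#     modelPsf = scl.utils.integrated_circular_gaussian(sigma=0.8)
#     monotonicity = scl.operators.Monotonicity((101, 101))
#     blend, skippedSources, skippedBands = deblend(
#         mExposure, modelPsf, footprint, ScarletDeblendConfig(),
#         spectrumInit=True, monotonicity=monotonicity,
#     )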


def buildOldObservation(
    modelPsf,
    psfCenter,
    mExposure,
    footprint=None,
    badPixelMasks=None,
    useWeights=True,
    convolutionType="real",
):
    """Generate a LiteObservation from a set of parameters.

    Make the generation and reconstruction of a scarlet model consistent
    by building a `LiteObservation` from a set of parameters.

    Parameters
    ----------
    modelPsf : `numpy.ndarray`
        The 2D model of the PSF in the partially deconvolved space.
    psfCenter : `tuple` or `Point2I` or `Point2D`
        The location `(x, y)` used as the center of the PSF.
    mExposure : `lsst.afw.image.multiband.MultibandExposure`
        The multi-band exposure that the model represents.
        If `mExposure` is `None` then no image, variance, or weights are
        attached to the observation.
    footprint : `lsst.afw.detection.Footprint`
        The footprint that is being fit.
        If `footprint` is `None` then the weights are not updated to mask
        out pixels not contained in the footprint.
    badPixelMasks : `list` of `str`
        The keys from the bit mask plane used to mask out pixels
        during the fit.
        If `badPixelMasks` is `None` then the default values from
        `ScarletDeblendConfig.badMask` are used.
    useWeights : `bool`
        Whether or not fitting should use inverse variance weights to
        calculate the log-likelihood.
    convolutionType : `str`
        The type of convolution to use (either "real" or "fft").
        When reconstructing an image it is advised to use "real" to avoid
        polluting the footprint with artifacts from the fft.

    Returns
    -------
    observation : `scarlet.lite.LiteObservation`
        The observation constructed from the input parameters.
    """
    from scarlet import lite

    # Initialize the observed PSFs
    psfModels, mExposure = computePsfKernelImage(mExposure, psfCenter)

    # Use the inverse variance as the weights
    if useWeights:
        weights = 1/mExposure.variance.array
    else:
        # Mask out bad pixels
        weights = np.ones_like(mExposure.image.array)
        if badPixelMasks is None:
            badPixelMasks = ScarletDeblendConfig().badMask
        badPixels = mExposure.mask.getPlaneBitMask(badPixelMasks)
        mask = mExposure.mask.array & badPixels
        weights[mask > 0] = 0

    if footprint is not None:
        # Mask out the pixels outside the footprint
        weights *= footprint.spans.asArray()

    observation = lite.LiteObservation(
        images=mExposure.image.array,
        variance=mExposure.variance.array,
        weights=weights,
        psfs=psfModels,
        model_psf=modelPsf[None, :, :],
        convolution_mode=convolutionType,
    )

    # Store the bands used to create the observation
    observation.bands = mExposure.filters
    return observation


def deblend_old_lite(mExposure, modelPsf, footprint, config, spectrumInit, wavelets=None):
    """Deblend a parent footprint

    Parameters
    ----------
    mExposure : `lsst.afw.image.MultibandExposure`
        The multiband exposure containing the image,
        mask, and variance data.
    modelPsf : `numpy.ndarray`
        The 2D model of the PSF in the partially deconvolved space.
    footprint : `lsst.afw.detection.Footprint`
        The footprint of the parent to deblend.
    config : `ScarletDeblendConfig`
        Configuration of the deblending task.
    spectrumInit : `bool`
        Whether or not to initialize the model using the spectrum.
    wavelets : `numpy.ndarray`
        Pre-generated wavelets to use if using wavelet initialization.

    Returns
    -------
    blend : `scarlet.lite.LiteBlend`
        The blend that was fit.
    skippedSources : `list[int]`
        Indices of sources that were skipped due to no flux.
        This usually means that a source was a spurious detection in one
        band that should not have been included in the merged catalog.
    skippedBands : `list[str]`
        Bands that were skipped because a PSF could not be generated for them.
    """
    from scarlet import lite

    # Extract coordinates from each MultiColorPeak
    bbox = footprint.getBBox()
    psfCenter = footprint.getCentroid()

    observation = buildOldObservation(
        modelPsf=modelPsf,
        psfCenter=psfCenter,
        mExposure=mExposure[:, bbox],
        footprint=footprint,
        badPixelMasks=config.badMask,
        useWeights=config.useWeights,
        convolutionType=config.convolutionType,
    )

    # Convert the centers to pixel coordinates
    xmin = bbox.getMinX()
    ymin = bbox.getMinY()
    centers = [
        np.array([peak.getIy() - ymin, peak.getIx() - xmin], dtype=int)
        for peak in footprint.peaks
        if not isPseudoSource(peak, config.pseudoColumns)
    ]

    # Initialize the sources
    if config.morphImage == "chi2":
        sources = lite.init_all_sources_main(
            observation,
            centers,
            min_snr=config.minSNR,
            thresh=config.morphThresh,
        )
    elif config.morphImage == "wavelet":
        _bbox = bboxToScarletBox(len(mExposure.filters), bbox, bbox.getMin())
        _wavelets = wavelets[(slice(None), *_bbox[1:].slices)]
        sources = lite.init_all_sources_wavelets(
            observation,
            centers,
            use_psf=False,
            wavelets=_wavelets,
            min_snr=config.minSNR,
        )
    else:
        raise ValueError("morphImage must be either 'chi2' or 'wavelet'.")

    # Set the optimizer
    if config.optimizer == "adaprox":
        parameterization = partial(
            lite.init_adaprox_component,
            bg_thresh=config.backgroundThresh,
            max_prox_iter=config.maxProxIter,
        )
    elif config.optimizer == "fista":
        parameterization = partial(
            lite.init_fista_component,
            bg_thresh=config.backgroundThresh,
        )
    else:
        raise ValueError("Unrecognized optimizer. Must be either 'adaprox' or 'fista'.")
    sources = lite.parameterize_sources(sources, observation, parameterization)

    # Attach the peak to all of the initialized sources
    for k, center in enumerate(centers):
        # This is just to make sure that there isn't a coding bug
        if len(sources[k].components) > 0 and np.any(sources[k].center != center):
            raise ValueError(f"Misaligned center, expected {center} but got {sources[k].center}")
        # Store the record for the peak with the appropriate source
        sources[k].detectedPeak = footprint.peaks[k]

    blend = lite.LiteBlend(sources, observation)

    # Initialize each source with its best fit spectrum
    if spectrumInit:
        blend.fit_spectra()

    # Record the indices of sources that could not be initialized
    # and were skipped
    skippedSources = [k for k, src in enumerate(sources) if src.is_null]

    blend.fit(
        max_iter=config.maxIter,
        e_rel=config.relativeError,
        min_iter=config.minIter,
        reweight=False,
    )

    # Store the location of the PSF center for storage
    blend.psfCenter = (psfCenter.x, psfCenter.y)

    # Calculate the bands that were skipped
    skippedBands = [band for band in mExposure.filters if band not in observation.bands]

    return blend, skippedSources, skippedBands


class ScarletDeblendConfig(pexConfig.Config):
    """ScarletDeblendConfig

    Configuration for the multiband deblender.
    The parameters are organized by the parameter types, which are
    - Stopping Criteria: Used to determine if the fit has converged
    - Position Fitting Criteria: Used to fit the positions of the peaks
    - Constraints: Used to apply constraints to the peaks and their components
    - Other: Parameters that don't fit into the above categories
    """
    # Stopping Criteria
    minIter = pexConfig.Field(dtype=int, default=15,
                              doc="Minimum number of iterations before the optimizer is allowed to stop.")
    maxIter = pexConfig.Field(dtype=int, default=300,
                              doc="Maximum number of iterations to deblend a single parent")
    relativeError = pexConfig.Field(dtype=float, default=1e-2,
                                    doc="Change in the loss function between iterations to exit fitter. "
                                        "Typically this is `1e-2` if measurements will be made on the "
                                        "flux re-distributed models and `1e-4` when making measurements "
                                        "on the models themselves.")

    # Constraints
    morphThresh = pexConfig.Field(dtype=float, default=1,
                                  doc="Fraction of background RMS a pixel must have "
                                      "to be included in the initial morphology")
    # Lite Parameters
    # All of these parameters (except version) are only valid if version='lite'
    version = pexConfig.ChoiceField(
        dtype=str,
        default="lite",
        allowed={
            "old_lite": "scarlet lite from the scarlet main package",
            "lite": "LSST optimized version of scarlet for survey data from a single instrument",
        },
        doc="The version of scarlet to use.",
    )
    optimizer = pexConfig.ChoiceField(
        dtype=str,
        default="adaprox",
        allowed={
            "adaprox": "Proximal ADAM optimization",
            "fista": "Accelerated proximal gradient method",
        },
        doc="The optimizer to use for fitting parameters; only used when version='lite'",
    )
    morphImage = pexConfig.ChoiceField(
        dtype=str,
        default="chi2",
        allowed={
            "chi2": "Initialize sources on a chi^2 image made from all available bands",
            "wavelet": "Initialize sources using a wavelet decomposition of the chi^2 image",
        },
        doc="The type of image to use for initializing the morphology. "
            "Must be either 'chi2' or 'wavelet'."
    )
    backgroundThresh = pexConfig.Field(
        dtype=float,
        default=0.25,
        doc="Fraction of background to use for a sparsity threshold. "
            "This prevents sources from growing unrealistically outside "
            "the parent footprint while still modeling flux correctly "
            "for bright sources."
    )
    maxProxIter = pexConfig.Field(
        dtype=int,
        default=1,
        doc="Maximum number of proximal operator iterations inside of each "
            "iteration of the optimizer. "
            "This config field is only used if version='lite' and optimizer='adaprox'."
    )
    waveletScales = pexConfig.Field(
        dtype=int,
        default=5,
        doc="Number of wavelet scales to use for wavelet initialization. "
            "This field is only used when `version`='lite' and `morphImage`='wavelet'."
    )

    # Other scarlet parameters
    useWeights = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to use inverse variance weighting. "
            "If `useWeights` is `False` then flat weights are used")
    modelPsfSize = pexConfig.Field(
        dtype=int, default=11,
        doc="Model PSF side length in pixels")
    modelPsfSigma = pexConfig.Field(
        dtype=float, default=0.8,
        doc="Define sigma for the model frame PSF")
    minSNR = pexConfig.Field(
        dtype=float, default=50,
        doc="Minimum signal to noise to accept the source. "
            "Sources with lower flux will be initialized with the PSF but updated "
            "like an ordinary ExtendedSource (known in scarlet as a `CompactSource`).")
    saveTemplates = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to save the SEDs and templates")
    processSingles = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to process isolated sources in the deblender")
    convolutionType = pexConfig.Field(
        dtype=str, default="fft",
        doc="Type of convolution to render the model to the observations.\n"
            "- 'fft': perform convolutions in Fourier space\n"
            "- 'real': perform convolutions in real space.")
    sourceModel = pexConfig.Field(
        dtype=str, default="double",
        doc="How to determine which model to use for sources, from\n"
            "- 'single': use a single component for all sources\n"
            "- 'double': use a bulge disk model for all sources\n"
            "- 'compact': use a single component model, initialized with a point source morphology, "
            "for all sources\n"
            "- 'point': use a point-source model for all sources\n"
            "- 'fit': use a PSF fitting model to determine the number of components (not yet implemented)",
        deprecated="This field will be deprecated when the default for `version` is changed to `lite`.",
    )
    setSpectra = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to solve for the best-fit spectra during initialization. "
            "This makes initialization slightly longer, as it requires a convolution "
            "to set the optimal spectra, but results in a much better initial log-likelihood "
            "and reduced total runtime, with convergence in fewer iterations. "
            "Only blends with `peaks * area < maxSpectrumCutoff` will use the "
            "improved initialization.")

    # Mask-plane restrictions
    badMask = pexConfig.ListField(
        dtype=str, default=defaultBadPixelMasks,
        doc="The mask planes used to mask out pixels during the fit")
    statsMask = pexConfig.ListField(dtype=str, default=["SAT", "INTRP", "NO_DATA"],
                                    doc="Mask planes to ignore when performing statistics")
    maskLimits = pexConfig.DictField(
        keytype=str,
        itemtype=float,
        default={},
        doc="Mask planes with the corresponding limit on the fraction of masked pixels. "
            "Sources violating this limit will not be deblended. "
            "If the fraction is `0` then the limit is a single pixel.",
    )
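    # An illustrative (not recommended) setting: ``maskLimits={"NO_DATA": 0.25}``
    # would skip deblending any parent whose footprint has more than 25% of
    # its pixels flagged NO_DATA (see ``_isMasked`` below for the exact check).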

    # Size restrictions
    maxNumberOfPeaks = pexConfig.Field(
        dtype=int, default=200,
        doc="Only deblend the brightest maxNumberOfPeaks peaks in the parent "
            "(<= 0: unlimited)")
    maxFootprintArea = pexConfig.Field(
        dtype=int, default=100_000,
        doc="Maximum area for footprints before they are ignored as large; "
            "non-positive means no threshold applied")
    maxAreaTimesPeaks = pexConfig.Field(
        dtype=int, default=10_000_000,
        doc="Maximum rectangular footprint area * nPeaks in the footprint. "
            "This was introduced in DM-33690 to prevent fields that are crowded or have a "
            "LSB galaxy that causes memory intensive initialization in scarlet from dominating "
            "the overall runtime and/or causing the task to run out of memory. "
            "(<= 0: unlimited)"
    )
    maxFootprintSize = pexConfig.Field(
        dtype=int, default=0,
        doc="Maximum linear dimension for footprints before they are ignored "
            "as large; non-positive means no threshold applied")
    minFootprintAxisRatio = pexConfig.Field(
        dtype=float, default=0.0,
        doc="Minimum axis ratio for footprints before they are ignored "
            "as large; non-positive means no threshold applied")
    maxSpectrumCutoff = pexConfig.Field(
        dtype=int, default=1_000_000,
        doc="Maximum number of pixels * number of sources in a blend. "
            "This is different than `maxFootprintArea` because this isn't "
            "the footprint area but the area of the bounding box that "
            "contains the footprint, and is also multiplied by the number of "
            "sources in the footprint. This prevents large skinny blends with "
            "a high density of sources from running out of memory. "
            "If `maxSpectrumCutoff == -1` then there is no cutoff."
    )
    # Failure modes
    fallback = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to fallback to a smaller number of components if a source does not initialize"
    )
    notDeblendedMask = pexConfig.Field(
        dtype=str, default="NOT_DEBLENDED", optional=True,
        doc="Mask name for footprints not deblended, or None")
    catchFailures = pexConfig.Field(
        dtype=bool, default=True,
        doc="If True, catch exceptions thrown by the deblender, log them, "
            "and set a flag on the parent, instead of letting them propagate up")

    # Other options
    columnInheritance = pexConfig.DictField(
        keytype=str, itemtype=str, default={
            "deblend_nChild": "deblend_parentNChild",
            "deblend_nPeaks": "deblend_parentNPeaks",
            "deblend_spectrumInitFlag": "deblend_spectrumInitFlag",
            "deblend_blendConvergenceFailedFlag": "deblend_blendConvergenceFailedFlag",
        },
        doc="Columns to pass from the parent to the child. "
            "The key is the name of the column for the parent record, "
            "the value is the name of the column to use for the child."
    )
    pseudoColumns = pexConfig.ListField(
        dtype=str, default=['merge_peak_sky', 'sky_source'],
        doc="Names of flags which should never be deblended."
    )

    # Testing options
    # Some obs packages and ci packages run the full pipeline on a small
    # subset of data to test that the pipeline is functioning properly.
    # This is not meant as scientific validation, so it can be useful
    # to only run on a small subset of the data that is large enough to
    # test the desired pipeline features but not so long that the deblender
    # is the tall pole in terms of execution times.
    useCiLimits = pexConfig.Field(
        dtype=bool, default=False,
        doc="Limit the number of sources deblended for CI to prevent long build times")
    ciDeblendChildRange = pexConfig.ListField(
        dtype=int, default=[5, 10],
        doc="Only deblend parent Footprints with a number of peaks in the (inclusive) range indicated. "
            "If `useCiLimits==False` then this parameter is ignored.")
    ciNumParentsToDeblend = pexConfig.Field(
        dtype=int, default=10,
        doc="Only use the first `ciNumParentsToDeblend` parent footprints with a total peak count "
            "within `ciDeblendChildRange`. "
            "If `useCiLimits==False` then this parameter is ignored.")


class ScarletDeblendTask(pipeBase.Task):
    """ScarletDeblendTask

    Split blended sources into individual sources.

    The input SourceCatalog is modified in place; the updated catalog and
    the persistable scarlet model data are returned.
    """
    ConfigClass = ScarletDeblendConfig
    _DefaultName = "scarletDeblend"

    def __init__(self, schema, peakSchema=None, **kwargs):
        """Create the task, adding necessary fields to the given schema.

        Parameters
        ----------
        schema : `lsst.afw.table.schema.schema.Schema`
            Schema object for measurement fields; will be modified in-place.
        peakSchema : `lsst.afw.table.schema.schema.Schema`
            Schema of Footprint Peaks that will be passed to the deblender.
            Any fields beyond the PeakTable minimal schema will be transferred
            to the main source Schema. If None, no fields will be transferred
            from the Peaks.
        **kwargs
            Passed to Task.__init__.
        """
        pipeBase.Task.__init__(self, **kwargs)

        peakMinimalSchema = afwDet.PeakTable.makeMinimalSchema()
        if peakSchema is None:
            # In this case, the peakSchemaMapper will transfer nothing, but
            # we'll still have one to simplify downstream code
            self.peakSchemaMapper = afwTable.SchemaMapper(peakMinimalSchema, schema)
        else:
            self.peakSchemaMapper = afwTable.SchemaMapper(peakSchema, schema)
            for item in peakSchema:
                if item.key not in peakMinimalSchema:
                    self.peakSchemaMapper.addMapping(item.key, item.field)
                    # Because SchemaMapper makes a copy of the output schema
                    # you give its ctor, it isn't updating this Schema in
                    # place. That's probably a design flaw, but in the
                    # meantime, we'll keep that schema in sync with the
                    # peakSchemaMapper.getOutputSchema() manually, by adding
                    # the same fields to both.
                    schema.addField(item.field)
            assert schema == self.peakSchemaMapper.getOutputSchema(), "Logic bug mapping schemas"
        self._addSchemaKeys(schema)
        self.schema = schema
        self.toCopyFromParent = [item.key for item in self.schema
                                 if item.field.getName().startswith("merge_footprint")]

    def _addSchemaKeys(self, schema):
        """Add deblender specific keys to the schema
        """
        # Parent (blend) fields
        self.runtimeKey = schema.addField('deblend_runtime', type=np.float32, doc='runtime in ms')
        self.iterKey = schema.addField('deblend_iterations', type=np.int32, doc='iterations to converge')
        self.nChildKey = schema.addField('deblend_nChild', type=np.int32,
                                         doc='Number of children this object has (defaults to 0)')
        self.nPeaksKey = schema.addField("deblend_nPeaks", type=np.int32,
                                         doc="Number of initial peaks in the blend. "
                                             "This includes peaks that may have been culled "
                                             "during deblending or failed to deblend")
        # Skipped flags
        self.deblendSkippedKey = schema.addField('deblend_skipped', type='Flag',
                                                 doc="Deblender skipped this source")
        self.isolatedParentKey = schema.addField('deblend_isolatedParent', type='Flag',
                                                 doc='The source has only a single peak '
                                                     'and was not deblended')
        self.pseudoKey = schema.addField('deblend_isPseudo', type='Flag',
                                         doc='The source is identified as a "pseudo" source and '
                                             'was not deblended')
        self.tooManyPeaksKey = schema.addField('deblend_tooManyPeaks', type='Flag',
                                               doc='Source had too many peaks; '
                                                   'only the brightest were included')
        self.tooBigKey = schema.addField('deblend_parentTooBig', type='Flag',
                                         doc='Parent footprint covered too many pixels')
        self.maskedKey = schema.addField('deblend_masked', type='Flag',
                                         doc='Parent footprint had too many masked pixels')
        # Convergence flags
        self.sedNotConvergedKey = schema.addField('deblend_sedConvergenceFailed', type='Flag',
                                                  doc='scarlet sed optimization did not converge before '
                                                      'config.maxIter')
        self.morphNotConvergedKey = schema.addField('deblend_morphConvergenceFailed', type='Flag',
                                                    doc='scarlet morph optimization did not converge before '
                                                        'config.maxIter')
        self.blendConvergenceFailedFlagKey = schema.addField('deblend_blendConvergenceFailedFlag',
                                                             type='Flag',
                                                             doc='at least one source in the blend '
                                                                 'failed to converge')
        # Error flags
        self.deblendFailedKey = schema.addField('deblend_failed', type='Flag',
                                                doc="Deblending failed on source")
        self.deblendErrorKey = schema.addField('deblend_error', type="String", size=25,
                                               doc='Name of error if the blend failed')
        self.incompleteDataKey = schema.addField('deblend_incompleteData', type='Flag',
                                                 doc='True when a blend has at least one band '
                                                     'that could not generate a PSF and was '
                                                     'not included in the model.')
        # Deblended source fields
        self.peakCenter = afwTable.Point2IKey.addFields(schema, name="deblend_peak_center",
                                                        doc="Center used to apply constraints in scarlet",
                                                        unit="pixel")
        self.peakIdKey = schema.addField("deblend_peakId", type=np.int32,
                                         doc="ID of the peak in the parent footprint. "
                                             "This is not unique, but the combination of 'parent' "
                                             "and 'peakId' should be for all child sources. "
                                             "Top level blends with no parents have 'peakId=0'")
        self.modelCenterFlux = schema.addField('deblend_peak_instFlux', type=float, units='count',
                                               doc="The instFlux at the peak position of the "
                                                   "deblended model")
        self.modelTypeKey = schema.addField("deblend_modelType", type="String", size=25,
                                            doc="The type of model used, for example "
                                                "MultiExtendedSource, SingleExtendedSource, PointSource")
        self.parentNPeaksKey = schema.addField("deblend_parentNPeaks", type=np.int32,
                                               doc="deblend_nPeaks from this record's parent.")
        self.parentNChildKey = schema.addField("deblend_parentNChild", type=np.int32,
                                               doc="deblend_nChild from this record's parent.")
        self.scarletFluxKey = schema.addField("deblend_scarletFlux", type=np.float32,
                                              doc="Flux measurement from scarlet")
        self.scarletLogLKey = schema.addField("deblend_logL", type=np.float32,
                                              doc="Final logL, used to identify regressions in scarlet.")
        self.edgePixelsKey = schema.addField('deblend_edgePixels', type='Flag',
                                             doc='Source had flux on the edge of the parent footprint')
        self.scarletSpectrumInitKey = schema.addField("deblend_spectrumInitFlag", type='Flag',
                                                      doc="True when scarlet initializes sources "
                                                          "in the blend with a more accurate spectrum. "
                                                          "The algorithm uses a lot of memory, "
                                                          "so large dense blends will use "
                                                          "a less accurate initialization.")
        self.nComponentsKey = schema.addField("deblend_nComponents", type=np.int32,
                                              doc="Number of components in a ScarletLiteSource. "
                                                  "If `config.version != 'lite'` then "
                                                  "this column is set to zero.")
        self.psfKey = schema.addField('deblend_deblendedAsPsf', type='Flag',
                                      doc='Deblender thought this source looked like a PSF')
        self.coverageKey = schema.addField('deblend_dataCoverage', type=np.float32,
                                           doc='Fraction of pixels with data. '
                                               'In other words, 1 - fraction of pixels with NO_DATA set.')
        # Blendedness/classification metrics
        self.maxOverlapKey = schema.addField("deblend_maxOverlap", type=np.float32,
                                             doc="Maximum overlap with all of the other neighbors flux "
                                                 "combined. "
                                                 "This is useful as a metric for determining how blended a "
                                                 "source is because if it only overlaps with other sources "
                                                 "at or below the noise level, it is likely to be a mostly "
                                                 "isolated source in the deconvolved model frame.")
        self.fluxOverlapKey = schema.addField("deblend_fluxOverlap", type=np.float32,
                                              doc="This is the total flux from neighboring objects that "
                                                  "overlaps with this source.")
        self.fluxOverlapFractionKey = schema.addField("deblend_fluxOverlapFraction", type=np.float32,
                                                      doc="This is the fraction of "
                                                          "`flux from neighbors/source flux` "
                                                          "for a given source within the source's "
                                                          "footprint.")
        self.blendednessKey = schema.addField("deblend_blendedness", type=np.float32,
                                              doc="The Bosch et al. 2018 metric for 'blendedness.'")

    @timeMethod
    def run(self, mExposure, mergedSources):
        """Get the psf from each exposure and then run deblend().

        Parameters
        ----------
        mExposure : `MultibandExposure`
            The exposures should be co-added images of the same
            shape and region of the sky.
        mergedSources : `SourceCatalog`
            The merged `SourceCatalog` that contains parent footprints
            to (potentially) deblend.

        Returns
        -------
        catalog : `SourceCatalog`
            The merged catalog with the deblended children appended in place.
        modelData : `lsst.scarlet.lite.io.ScarletModelData`
            The persistable data model containing the scarlet models
            for each blend.
        """
        return self.deblend(mExposure, mergedSources)

    @timeMethod
    def deblend(self, mExposure, catalog):
        """Deblend a data cube of multiband images

        Parameters
        ----------
        mExposure : `MultibandExposure`
            The exposures should be co-added images of the same
            shape and region of the sky.
        catalog : `SourceCatalog`
            The merged `SourceCatalog` that contains parent footprints
            to (potentially) deblend. The new deblended sources are
            appended to this catalog in place.

        Returns
        -------
        catalog : `SourceCatalog`
            The input catalog with the deblended children appended in place.
        modelData : `lsst.scarlet.lite.io.ScarletModelData`
            The persistable data model containing the scarlet models
            for each blend.
        """
        import time

        # Cull footprints if required by ci
        if self.config.useCiLimits:
            self.log.info("Using CI catalog limits, the original number of sources to deblend was %d.",
                          len(catalog))
            # Select parents with a number of children in the range
            # config.ciDeblendChildRange
            minChildren, maxChildren = self.config.ciDeblendChildRange
            nPeaks = np.array([len(src.getFootprint().peaks) for src in catalog])
            childrenInRange = np.where((nPeaks >= minChildren) & (nPeaks <= maxChildren))[0]
            if len(childrenInRange) < self.config.ciNumParentsToDeblend:
                raise ValueError("Fewer than ciNumParentsToDeblend children were contained in the range "
                                 "indicated by ciDeblendChildRange. Adjust this range to include more "
                                 "parents.")
            # Keep all of the isolated parents and the first
            # `ciNumParentsToDeblend` children
            parents = nPeaks == 1
            children = np.zeros((len(catalog),), dtype=bool)
            children[childrenInRange[:self.config.ciNumParentsToDeblend]] = True
            catalog = catalog[parents | children]
            # We need to update the IdFactory, otherwise the source ids
            # will not be sequential
            idFactory = catalog.getIdFactory()
            maxId = np.max(catalog["id"])
            idFactory.notify(maxId)

        self.log.info("Deblending %d sources in %d exposure bands", len(catalog), len(mExposure))
        periodicLog = PeriodicLogger(self.log)

        # Create a set of wavelet coefficients if using wavelet initialization
        if self.config.morphImage == "wavelet":
            images = mExposure.image.array
            variance = mExposure.variance.array
            wavelets = scl.detect.get_detect_wavelets(images, variance, scales=self.config.waveletScales)
        else:
            wavelets = None

        # Add the NOT_DEBLENDED mask to the mask plane in each band
        if self.config.notDeblendedMask:
            for mask in mExposure.mask:
                mask.addMaskPlane(self.config.notDeblendedMask)

        # Initialize the persistable data model
        modelPsf = scl.utils.integrated_circular_gaussian(sigma=self.config.modelPsfSigma)
        dataModel = scl.io.ScarletModelData(modelPsf)

        # Initialize the monotonicity operator with a size of 101 x 101 pixels.
        # Note: If a component is > 101x101 in either axis then the
        # monotonicity operator will resize itself.
        monotonicity = scl.operators.Monotonicity((101, 101))

        nParents = len(catalog)
        nDeblendedParents = 0
        skippedParents = []
        for parentIndex in range(nParents):
            parent = catalog[parentIndex]
            foot = parent.getFootprint()
            bbox = foot.getBBox()
            peaks = foot.getPeaks()

            # Since we use the first peak for the parent object, we should
            # propagate its flags to the parent source.
            parent.assign(peaks[0], self.peakSchemaMapper)

            # Block of conditions for skipping a parent with multiple children
            if (skipArgs := self._checkSkipped(parent, mExposure)) is not None:
                self._skipParent(parent, *skipArgs)
                skippedParents.append(parentIndex)
                continue

            nDeblendedParents += 1
            self.log.trace("Parent %d: deblending %d peaks", parent.getId(), len(peaks))
            # Run the deblender
            blendError = None

            # Choose whether or not to use improved spectral initialization.
            # This significantly cuts down on the number of iterations
            # that the optimizer needs and usually results in a better
            # fit.
            # But using least squares on a very large blend causes memory
            # issues, so it is not done for large blends
            if self.config.setSpectra:
                if self.config.maxSpectrumCutoff <= 0:
                    spectrumInit = True
                else:
                    spectrumInit = len(foot.peaks) * bbox.getArea() < self.config.maxSpectrumCutoff
            else:
                spectrumInit = False

            try:
                t0 = time.monotonic()
                # Build the parameter lists with the same ordering
                if self.config.version == "lite":
                    blend, skippedSources, skippedBands = deblend(
                        mExposure=mExposure,
                        modelPsf=modelPsf,
                        footprint=foot,
                        config=self.config,
                        spectrumInit=spectrumInit,
                        wavelets=wavelets,
                        monotonicity=monotonicity,
                    )
                elif self.config.version == "old_lite":
                    blend, skippedSources, skippedBands = deblend_old_lite(
                        mExposure=mExposure,
                        modelPsf=modelPsf,
                        footprint=foot,
                        config=self.config,
                        spectrumInit=spectrumInit,
                        wavelets=wavelets,
                    )
                tf = time.monotonic()
                runtime = (tf-t0)*1000
                converged = _checkBlendConvergence(blend, self.config.relativeError)
                # Store the number of components in the blend
                nComponents = len(blend.components)
                nChild = len(blend.sources)
                parent.set(self.incompleteDataKey, len(skippedBands) > 0)
            # Catch all errors and filter out the ones that we know about
            except Exception as e:
                blendError = type(e).__name__
                if isinstance(e, ScarletGradientError):
                    parent.set(self.iterKey, e.iterations)
                else:
                    blendError = "UnknownError"
                    if self.config.catchFailures:
                        # Make it easy to find UnknownErrors in the log file
                        self.log.warning("UnknownError")
                        import traceback
                        traceback.print_exc()
                    else:
                        raise

                self._skipParent(
                    parent=parent,
                    skipKey=self.deblendFailedKey,
                    logMessage=f"Unable to deblend source {parent.getId()}: {blendError}",
                )
                parent.set(self.deblendErrorKey, blendError)
                skippedParents.append(parentIndex)
                continue

            # Update the parent record with the deblending results
            self._updateParentRecord(
                parent=parent,
                nPeaks=len(peaks),
                nChild=nChild,
                nComponents=nComponents,
                runtime=runtime,
                iterations=len(blend.loss),
                logL=blend.loss[-1],
                spectrumInit=spectrumInit,
                converged=converged,
            )

            # Add each deblended source to the catalog
            for k, scarletSource in enumerate(blend.sources):
                # Skip any sources with no flux or that scarlet skipped because
                # it could not initialize
                if k in skippedSources or (self.config.version == "lite" and scarletSource.is_null):
                    # No need to propagate anything
                    continue
                parent.set(self.deblendSkippedKey, False)

                # Add all fields except the HeavyFootprint to the
                # source record
                sourceRecord = self._addChild(
                    parent=parent,
                    peak=scarletSource.detectedPeak,
                    catalog=catalog,
                    scarletSource=scarletSource,
                )
                scarletSource.record_id = sourceRecord.getId()
                scarletSource.peak_id = scarletSource.detectedPeak.getId()

            # Store the blend information so that it can be persisted
            if self.config.version == "lite":
                blendData = scl.io.ScarletBlendData.from_blend(blend, blend.psfCenter)
            else:
                blendData = io.oldScarletToData(blend, blend.psfCenter, bbox.getMin())
            dataModel.blends[parent.getId()] = blendData

            # Log a message if it has been a while since the last log.
            periodicLog.log("Deblended %d parent sources out of %d", parentIndex + 1, nParents)

        # Clear the cached values in scarlet to clear out memory
        if self.config.version == "old_lite":
            import scarlet
            scarlet.cache.Cache._cache = {}

        # Update the mExposure mask with the footprint of skipped parents
        if self.config.notDeblendedMask:
            for mask in mExposure.mask:
                for parentIndex in skippedParents:
                    fp = catalog[parentIndex].getFootprint()
                    fp.spans.setMask(mask, mask.getPlaneBitMask(self.config.notDeblendedMask))

        self.log.info("Deblender results: of %d parent sources, %d were deblended, "
                      "creating %d children, for a total of %d sources",
                      nParents, nDeblendedParents, len(catalog)-nParents, len(catalog))
        return catalog, dataModel

    def _isLargeFootprint(self, footprint):
        """Returns whether a Footprint is large

        'Large' is defined by thresholds on the area, size and axis ratio,
        and total area of the bounding box multiplied by
        the number of children.
        These may be disabled independently by configuring them to be
        non-positive.
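
        For example, with the default ``maxAreaTimesPeaks = 10_000_000``,
        a parent with a 1000x1000 pixel bounding box and 15 peaks gives
        ``1_000_000 * 15 = 15_000_000 > 10_000_000``, so it is flagged
        as large and skipped.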
1097 """
1098 if self.config.maxFootprintArea > 0 and footprint.getArea() > self.config.maxFootprintArea:
1099 return True
1100 if self.config.maxFootprintSize > 0:
1101 bbox = footprint.getBBox()
1102 if max(bbox.getWidth(), bbox.getHeight()) > self.config.maxFootprintSize:
1103 return True
1104 if self.config.minFootprintAxisRatio > 0:
1105 axes = afwEll.Axes(footprint.getShape())
1106 if axes.getB() < self.config.minFootprintAxisRatio*axes.getA():
1107 return True
1108 if self.config.maxAreaTimesPeaks > 0:
1109 if footprint.getBBox().getArea() * len(footprint.peaks) > self.config.maxAreaTimesPeaks:
1110 return True
1111 return False

    def _isMasked(self, footprint, mExposure):
        """Returns whether the footprint violates the mask limits

        Parameters
        ----------
        footprint : `lsst.afw.detection.Footprint`
            The footprint to check for masked pixels
        mExposure : `lsst.afw.image.MultibandExposure`
            The multiband exposure whose mask planes are checked for
            masked pixels in the `footprint`.

        Returns
        -------
        isMasked : `bool`
            `True` if the fraction of masked pixels in the footprint
            exceeds the limit for any of the mask planes listed in
            `self.config.maskLimits`.
        """
        bbox = footprint.getBBox()
        mask = np.bitwise_or.reduce(mExposure.mask[:, bbox].array, axis=0)
        size = float(footprint.getArea())
        for maskName, limit in self.config.maskLimits.items():
            maskVal = mExposure.mask.getPlaneBitMask(maskName)
            _mask = afwImage.MaskX(mask & maskVal, xy0=bbox.getMin())
            # spanset of masked pixels
            maskedSpan = footprint.spans.intersect(_mask, maskVal)
            if (maskedSpan.getArea())/size > limit:
                return True
        return False

    def _skipParent(self, parent, skipKey, logMessage):
        """Update a parent record that is not being deblended.

        This is a fairly trivial function but is implemented to ensure
        that a skipped parent updates the appropriate columns
        consistently, and always has a flag to mark the reason that
        it is being skipped.

        Parameters
        ----------
        parent : `lsst.afw.table.source.source.SourceRecord`
            The parent record to flag as skipped.
        skipKey : `lsst.afw.table.Key`
            The key of the flag to mark the reason for skipping.
        logMessage : `str`
            The message to display in a log.trace when a source
            is skipped.
        """
        if logMessage is not None:
            self.log.trace(logMessage)
        self._updateParentRecord(
            parent=parent,
            nPeaks=len(parent.getFootprint().peaks),
            nChild=0,
            nComponents=0,
            runtime=np.nan,
            iterations=0,
            logL=np.nan,
            spectrumInit=False,
            converged=False,
        )

        # Mark the source as skipped by the deblender and
        # flag the reason why.
        parent.set(self.deblendSkippedKey, True)
        parent.set(skipKey, True)

    def _checkSkipped(self, parent, mExposure):
        """Check if a parent record should be skipped by the deblender.

        The skip conditions are checked in order and the flag key and log
        message for the first condition that applies are returned, so that
        a skipped parent is always flagged with the reason it was skipped.

        Parameters
        ----------
        parent : `lsst.afw.table.source.source.SourceRecord`
            The parent record to check.
        mExposure : `MultibandExposure`
            The exposures should be co-added images of the same
            shape and region of the sky.

        Returns
        -------
        skipArgs : `tuple` of (`lsst.afw.table.Key`, `str`) or `None`
            The flag key and log message to pass to `_skipParent`,
            or `None` if the parent will be deblended.
        """
        skipKey = None
        skipMessage = None
        footprint = parent.getFootprint()
        if len(footprint.peaks) < 2 and not self.config.processSingles:
            # Skip isolated sources unless processSingles is turned on.
            # Note: this does not flag isolated sources as skipped or
            # set the NOT_DEBLENDED mask in the exposure,
            # since these are not really skipped blends.
            skipKey = self.isolatedParentKey
        elif isPseudoSource(parent, self.config.pseudoColumns):
            # We also skip pseudo sources, like sky objects, which
            # are intended to be skipped.
            skipKey = self.pseudoKey
        if self._isLargeFootprint(footprint):
            # The footprint is above the maximum footprint size limit
            skipKey = self.tooBigKey
            skipMessage = f"Parent {parent.getId()}: skipping large footprint"
        elif self._isMasked(footprint, mExposure):
            # The footprint exceeds the maximum number of masked pixels
            skipKey = self.maskedKey
            skipMessage = f"Parent {parent.getId()}: skipping masked footprint"
        elif self.config.maxNumberOfPeaks > 0 and len(footprint.peaks) > self.config.maxNumberOfPeaks:
            # Unlike meas_deblender, in scarlet we skip the entire blend
            # if the number of peaks exceeds max peaks, since neglecting
            # to model any peaks often results in catastrophic failure
            # of scarlet to generate models for the brighter sources.
            skipKey = self.tooManyPeaksKey
            skipMessage = f"Parent {parent.getId()}: skipping blend with too many peaks"
        if skipKey is not None:
            return (skipKey, skipMessage)
        return None

    def setSkipFlags(self, mExposure, catalog):
        """Set the skip flags for all of the parent sources

        This is mostly used for testing which parent sources will be deblended
        and which will be skipped based on the current configuration options.
        Skipped sources will have the appropriate flags set in place in the
        catalog.

        Parameters
        ----------
        mExposure : `MultibandExposure`
            The exposures should be co-added images of the same
            shape and region of the sky.
        catalog : `SourceCatalog`
            The merged `SourceCatalog` that contains parent footprints
            to (potentially) deblend. The records are modified in place.
        """
        for src in catalog:
            if (skipArgs := self._checkSkipped(src, mExposure)) is not None:
                self._skipParent(src, *skipArgs)

    def _updateParentRecord(self, parent, nPeaks, nChild, nComponents,
                            runtime, iterations, logL, spectrumInit, converged):
        """Update a parent record in all of the single band catalogs.

        Ensure that all locations that update a parent record,
        whether it is skipped or updated after deblending,
        update all of the appropriate columns.

        Parameters
        ----------
        parent : `lsst.afw.table.source.source.SourceRecord`
            The parent record to update.
        nPeaks : `int`
            Number of peaks in the parent footprint.
        nChild : `int`
            Number of children deblended from the parent.
            This may differ from `nPeaks` if some of the peaks
            were culled and have no deblended model.
        nComponents : `int`
            Total number of components in the parent.
            This is usually different than the number of children,
            since it is common for a single source to have multiple
            components.
        runtime : `float`
            Total runtime for deblending.
        iterations : `int`
            Total number of iterations in scarlet before convergence.
        logL : `float`
            Final log likelihood of the blend.
        spectrumInit : `bool`
            True when scarlet used `set_spectra` to initialize all
            sources with better initial intensities.
        converged : `bool`
            True when the optimizer reached convergence before
            reaching the maximum number of iterations.
        """
        parent.set(self.nPeaksKey, nPeaks)
        parent.set(self.nChildKey, nChild)
        parent.set(self.nComponentsKey, nComponents)
        parent.set(self.runtimeKey, runtime)
        parent.set(self.iterKey, iterations)
        parent.set(self.scarletLogLKey, logL)
        parent.set(self.scarletSpectrumInitKey, spectrumInit)
        parent.set(self.blendConvergenceFailedFlagKey, converged)

    def _addChild(self, parent, peak, catalog, scarletSource):
        """Add a child to a catalog.

        This creates a new child in the source catalog,
        assigning it a parent id, and adding all columns
        that are independent across all filter bands.

        Parameters
        ----------
        parent : `lsst.afw.table.source.source.SourceRecord`
            The parent of the new child record.
        peak : `lsst.afw.table.PeakRecord`
            The peak record for the peak from the parent peak catalog.
        catalog : `lsst.afw.table.source.source.SourceCatalog`
            The merged `SourceCatalog` that contains parent footprints
            to (potentially) deblend.
        scarletSource : `scarlet.Component`
            The scarlet model for the new source record.

        Returns
        -------
        src : `lsst.afw.table.source.source.SourceRecord`
            The new child source record.
        """
        src = catalog.addNew()
        for key in self.toCopyFromParent:
            src.set(key, parent.get(key))
        # The peak catalog is the same for all bands,
        # so we just use the first peak catalog
        src.assign(peak, self.peakSchemaMapper)
        src.setParent(parent.getId())
        src.set(self.nPeaksKey, 1)
        # Set the psf key based on whether or not the source was
        # deblended using the PointSource model.
        # This key is not that useful anymore since we now keep track of
        # `modelType`, but we continue to propagate it in case code downstream
        # is expecting it.
        src.set(self.psfKey, scarletSource.__class__.__name__ == "PointSource")
        src.set(self.modelTypeKey, scarletSource.__class__.__name__)
        # We set the runtime to zero so that summing up the
        # runtime column will give the total time spent
        # running the deblender for the catalog.
        src.set(self.runtimeKey, 0)

        # Set the position of the peak from the parent footprint
        # This will make it easier to match the same source across
        # deblenders and across observations, where the peak
        # position is unlikely to change unless enough time passes
        # for a source to move on the sky.
        src.set(self.peakCenter, geom.Point2I(peak["i_x"], peak["i_y"]))
        src.set(self.peakIdKey, peak["id"])

        # Store the number of components for the source
        src.set(self.nComponentsKey, len(scarletSource.components))

        # Flag sources missing one or more bands
        src.set(self.incompleteDataKey, parent.get(self.incompleteDataKey))

        # Propagate columns from the parent to the child
        for parentColumn, childColumn in self.config.columnInheritance.items():
            src.set(childColumn, parent.get(parentColumn))

        return src