Coverage for python/lsst/meas/extensions/scarlet/scarletDeblendTask.py : 15%

# This file is part of meas_extensions_scarlet.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

import logging
import numpy as np
import scarlet
from scarlet.psf import ImagePSF, GaussianPSF
from scarlet import Blend, Frame, Observation
from scarlet.renderer import ConvolutionRenderer
from scarlet.initialization import init_all_sources

import lsst.log
import lsst.pex.config as pexConfig
from lsst.pex.exceptions import InvalidParameterError
import lsst.pipe.base as pipeBase
from lsst.geom import Point2I, Box2I, Point2D
import lsst.afw.geom.ellipses as afwEll
import lsst.afw.image.utils
import lsst.afw.image as afwImage
import lsst.afw.detection as afwDet
import lsst.afw.table as afwTable

from .source import modelToHeavy

# scarlet initialization allows the user to specify the maximum number
# of components for a source but will fall back to fewer components or
# an initial PSF morphology depending on the S/N. If either of those
# happens, scarlet currently warns that the type of source created by
# the user was modified. This is not ideal behavior, as it creates a lot
# of unnecessary warnings for expected behavior and the information is
# already persisted due to the change in source type.
# So we silence all of the initialization warnings here to prevent
# polluting the log files.
scarletInitLogger = logging.getLogger("scarlet.initialization")
scarletSourceLogger = logging.getLogger("scarlet.source")
scarletInitLogger.setLevel(logging.ERROR)
scarletSourceLogger.setLevel(logging.ERROR)

__all__ = ["deblend", "ScarletDeblendConfig", "ScarletDeblendTask"]

logger = lsst.log.Log.getLogger("meas.deblender.deblend")


class IncompleteDataError(Exception):
    """The PSF could not be computed due to incomplete data
    """
    pass


class ScarletGradientError(Exception):
    """An error occurred during optimization

    This error occurs when the optimizer encounters
    a NaN value while calculating the gradient.
    """
    def __init__(self, iterations, sources):
        self.iterations = iterations
        self.sources = sources
        msg = ("ScarletGradientError in iteration {0}. "
               "NaN values introduced in sources {1}")
        self.message = msg.format(iterations, sources)

    def __str__(self):
        return self.message


def _checkBlendConvergence(blend, f_rel):
    """Check whether or not a blend has converged
    """
    deltaLoss = np.abs(blend.loss[-2] - blend.loss[-1])
    convergence = f_rel * np.abs(blend.loss[-1])
    return deltaLoss < convergence
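

# Illustrative sketch only (not part of the pipeline): demonstrates the
# relative-loss convergence test above with a stand-in blend object.
# `_FakeBlend` is hypothetical; the real argument is a `scarlet.Blend`
# whose `loss` attribute holds the loss history.
def _exampleCheckBlendConvergence():
    class _FakeBlend:
        # The loss changed by only 1e-6 on the last iteration
        loss = [120.0, 100.000001, 100.0]

    # With f_rel=1e-4 the allowed change is 1e-4 * 100 = 1e-2,
    # so this blend counts as converged
    assert _checkBlendConvergence(_FakeBlend(), f_rel=1e-4)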


def _getPsfFwhm(psf):
    """Calculate the FWHM of the `psf`
    """
    # 2.35 approximates the Gaussian sigma-to-FWHM conversion factor
    # 2*sqrt(2*ln(2)) ~= 2.3548
    return psf.computeShape().getDeterminantRadius() * 2.35
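

# A minimal check of the conversion factor used above: for a Gaussian
# profile, FWHM = 2*sqrt(2*ln(2)) * sigma.
def _exampleFwhmFactor():
    factor = 2.0 * np.sqrt(2.0 * np.log(2.0))
    assert np.isclose(factor, 2.3548, atol=1e-4)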


def _computePsfImage(self, position=None):
    """Get a multiband PSF image

    The PSF Kernel Image is computed for each band
    and combined into a (filter, y, x) array.
    The result is not cached, so if the same PSF is expected
    to be used multiple times it is a good idea to store the
    result in another variable.

    Note: this is a temporary fix during the deblender sprint.
    In the future this function will replace the current method
    in `afw.MultibandExposure.computePsfImage` (DM-19789).

    Parameters
    ----------
    position : `Point2D` or `tuple`
        Coordinates to evaluate the PSF. If `position` is `None`
        then `Psf.getAveragePosition()` is used.

    Returns
    -------
    psfImage : array
        The multiband PSF image.
    """
    psfs = []
    # Make the coordinates into a Point2D (if necessary)
    if not isinstance(position, Point2D) and position is not None:
        position = Point2D(position[0], position[1])

    for bidx, single in enumerate(self.singles):
        try:
            if position is None:
                psf = single.getPsf().computeImage()
            else:
                psf = single.getPsf().computeKernelImage(position)
            psfs.append(psf)
        except InvalidParameterError:
            # This band failed to compute the PSF due to incomplete data
            # at that location. This is unlikely to be a problem for Rubin,
            # however the edges of some HSC COSMOS fields contain incomplete
            # data in some bands, so we track this error to distinguish it
            # from unknown errors.
            msg = "Failed to compute PSF at {} in band {}"
            raise IncompleteDataError(msg.format(position, self.filters[bidx]))

    left = np.min([psf.getBBox().getMinX() for psf in psfs])
    bottom = np.min([psf.getBBox().getMinY() for psf in psfs])
    right = np.max([psf.getBBox().getMaxX() for psf in psfs])
    top = np.max([psf.getBBox().getMaxY() for psf in psfs])
    bbox = Box2I(Point2I(left, bottom), Point2I(right, top))
    psfs = [afwImage.utils.projectImage(psf, bbox) for psf in psfs]
    psfImage = afwImage.MultibandImage.fromImages(self.filters, psfs)
    return psfImage
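

# Illustrative sketch only: `_computePsfImage` is written like a method
# (its first argument is named `self`) because it is intended to replace
# `afw.MultibandExposure.computePsfImage` (DM-19789). Until then it is
# called as a free function with the exposure as the first argument, as
# `deblend` does below. Here `mExposure` stands for any
# `lsst.afw.image.MultibandExposure`:
#
#     psfs = _computePsfImage(mExposure, Point2D(1000.0, 2000.0)).array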


def getFootprintMask(footprint, mExposure):
    """Mask pixels outside the footprint

    Parameters
    ----------
    footprint : `lsst.afw.detection.Footprint`
        The footprint of the parent to deblend
    mExposure : `lsst.afw.image.MultibandExposure`
        The multiband exposure containing the image,
        mask, and variance data

    Returns
    -------
    footprintMask : array
        Boolean array with pixels not in the footprint set to one.
    """
    bbox = footprint.getBBox()
    fpMask = afwImage.Mask(bbox)
    footprint.spans.setMask(fpMask, 1)
    fpMask = ~fpMask.getArray().astype(bool)
    return fpMask
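

# Illustrative sketch only: the same invert-inside-the-bounding-box logic
# as `getFootprintMask`, in pure numpy. Pixels covered by the footprint
# are set to 1, so the inverted boolean array is True (one) exactly where
# the footprint does not cover the bounding box.
def _exampleFootprintMask():
    fpMask = np.zeros((4, 4), dtype=int)
    fpMask[1:3, 1:3] = 1  # stand-in for footprint.spans.setMask(fpMask, 1)
    outside = ~fpMask.astype(bool)
    assert not outside[1, 1] and outside[0, 0]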


def deblend(mExposure, footprint, config):
    """Deblend a parent footprint

    Parameters
    ----------
    mExposure : `lsst.afw.image.MultibandExposure`
        The multiband exposure containing the image,
        mask, and variance data
    footprint : `lsst.afw.detection.Footprint`
        The footprint of the parent to deblend
    config : `ScarletDeblendConfig`
        Configuration of the deblending task

    Returns
    -------
    blend : `scarlet.Blend`
        The blend initialized and fit by scarlet
    skipped : list of int
        Indices of the peaks that scarlet was unable to initialize
    spectrumInit : bool
        Whether or not the improved spectral initialization was used
    """
    # Extract coordinates from each MultiColorPeak
    bbox = footprint.getBBox()

    # Create the data array from the masked images
    images = mExposure.image[:, bbox].array

    # Use the inverse variance as the weights
    if config.useWeights:
        weights = 1/mExposure.variance[:, bbox].array
    else:
        weights = np.ones_like(images)
    # Give zero weight to pixels flagged with any of the bad-mask planes
    badPixels = mExposure.mask.getPlaneBitMask(config.badMask)
    mask = mExposure.mask[:, bbox].array & badPixels
    weights[mask > 0] = 0

    # Mask out the pixels outside the footprint
    mask = getFootprintMask(footprint, mExposure)
    weights *= ~mask

    psfs = _computePsfImage(mExposure, footprint.getCentroid()).array.astype(np.float32)
    psfs = ImagePSF(psfs)
    model_psf = GaussianPSF(sigma=(config.modelPsfSigma,)*len(mExposure.filters))

    frame = Frame(images.shape, psf=model_psf, channels=mExposure.filters)
    observation = Observation(images, psf=psfs, weights=weights, channels=mExposure.filters)
    if config.convolutionType == "fft":
        observation.match(frame)
    elif config.convolutionType == "real":
        renderer = ConvolutionRenderer(observation, frame, convolution_type="real")
        observation.match(frame, renderer=renderer)
    else:
        raise ValueError("Unrecognized convolution type {}".format(config.convolutionType))

    assert config.sourceModel in ["single", "double", "compact", "point", "fit"]

    # Set the appropriate number of components
    if config.sourceModel == "single":
        maxComponents = 1
    elif config.sourceModel == "double":
        maxComponents = 2
    elif config.sourceModel == "compact":
        maxComponents = 0
    elif config.sourceModel == "point":
        raise NotImplementedError("Point source photometry is currently not implemented")
    elif config.sourceModel == "fit":
        # It is likely in the future that there will be some heuristic
        # used to determine what type of model to use for each source,
        # but that has not yet been implemented (see DM-22551)
        raise NotImplementedError("sourceModel 'fit' has not been implemented yet")

    # Convert the centers to pixel coordinates
    xmin = bbox.getMinX()
    ymin = bbox.getMinY()
    centers = [np.array([peak.getIy()-ymin, peak.getIx()-xmin], dtype=int) for peak in footprint.peaks]

    # Choose whether or not to use the improved spectral initialization
    if config.setSpectra:
        if config.maxSpectrumCutoff <= 0:
            spectrumInit = True
        else:
            spectrumInit = len(centers) * bbox.getArea() < config.maxSpectrumCutoff
    else:
        spectrumInit = False
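
    # For example, with the default maxSpectrumCutoff of 1,000,000, a
    # blend with 10 peaks in a 100x100 pixel bounding box gives
    # 10 * 10,000 = 100,000 < 1,000,000, so the improved spectral
    # initialization is used; the same 10 peaks in a 1000x1000 box
    # would exceed the cutoff and fall back to the cheaper scheme.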

    # Only deblend sources that can be initialized
    sources, skipped = init_all_sources(
        frame=frame,
        centers=centers,
        observations=observation,
        thresh=config.morphThresh,
        max_components=maxComponents,
        min_snr=config.minSNR,
        shifting=False,
        fallback=config.fallback,
        silent=config.catchFailures,
        set_spectra=spectrumInit,
    )

    # Attach the peak to all of the initialized sources
    srcIndex = 0
    for k, center in enumerate(centers):
        if k not in skipped:
            # This is just to make sure that there isn't a coding bug
            assert np.all(sources[srcIndex].center == center)
            # Store the record for the peak with the appropriate source
            sources[srcIndex].detectedPeak = footprint.peaks[k]
            srcIndex += 1

    # Create the blend and attempt to optimize it
    blend = Blend(sources, observation)
    try:
        blend.fit(max_iter=config.maxIter, e_rel=config.relativeError)
    except ArithmeticError:
        # This occurs when a gradient update produces a NaN value.
        # This is usually due to a source initialized with a
        # negative SED or no flux, often because the peak
        # is a noise fluctuation in one band and not a real source.
        iterations = len(blend.loss)
        failedSources = []
        for k, src in enumerate(sources):
            if np.any(~np.isfinite(src.get_model())):
                failedSources.append(k)
        raise ScarletGradientError(iterations, failedSources)

    return blend, skipped, spectrumInit
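

# Illustrative sketch only (it mirrors the call in
# `ScarletDeblendTask.deblend` below): deblending a single parent
# footprint, given a multiband exposure `mExposure`, a parent
# `footprint`, and a `ScarletDeblendConfig` instance `config`:
#
#     blend, skipped, spectrumInit = deblend(mExposure, footprint, config)
#     converged = _checkBlendConvergence(blend, config.relativeError)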


class ScarletDeblendConfig(pexConfig.Config):
    """Configuration for the multiband deblender

    The parameters are organized by the parameter types, which are

    - Stopping Criteria: Used to determine if the fit has converged
    - Position Fitting Criteria: Used to fit the positions of the peaks
    - Constraints: Used to apply constraints to the peaks and their components
    - Other: Parameters that don't fit into the above categories
    """
    # Stopping Criteria
    maxIter = pexConfig.Field(dtype=int, default=300,
                              doc=("Maximum number of iterations to deblend a single parent"))
    relativeError = pexConfig.Field(dtype=float, default=1e-4,
                                    doc=("Change in the loss function between "
                                         "iterations to exit fitter"))

    # Constraints
    morphThresh = pexConfig.Field(dtype=float, default=1,
                                  doc="Fraction of background RMS a pixel must have "
                                      "to be included in the initial morphology")
    # Other scarlet parameters
    useWeights = pexConfig.Field(
        dtype=bool, default=True,
        doc=("Whether or not to use inverse variance weighting. "
             "If `useWeights` is `False` then flat weights are used"))
    modelPsfSize = pexConfig.Field(
        dtype=int, default=11,
        doc="Model PSF side length in pixels")
    modelPsfSigma = pexConfig.Field(
        dtype=float, default=0.8,
        doc="Define sigma for the model frame PSF")
    minSNR = pexConfig.Field(
        dtype=float, default=50,
        doc="Minimum signal to noise to accept the source. "
            "Sources with lower flux will be initialized with the PSF but updated "
            "like an ordinary ExtendedSource (known in scarlet as a `CompactSource`).")
    saveTemplates = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to save the SEDs and templates")
    processSingles = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to process isolated sources in the deblender")
    convolutionType = pexConfig.Field(
        dtype=str, default="fft",
        doc="Type of convolution to render the model to the observations.\n"
            "- 'fft': perform convolutions in Fourier space\n"
            "- 'real': perform convolutions in real space.")
    sourceModel = pexConfig.Field(
        dtype=str, default="double",
        doc=("How to determine which model to use for sources, from\n"
             "- 'single': use a single component for all sources\n"
             "- 'double': use a bulge-disk model for all sources\n"
             "- 'compact': use a single component model, initialized with a point source morphology, "
             "for all sources\n"
             "- 'point': use a point-source model for all sources\n"
             "- 'fit': use a PSF fitting model to determine the number of components "
             "(not yet implemented)")
    )
    setSpectra = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to solve for the best-fit spectra during initialization. "
            "This makes initialization slightly longer, as it requires a convolution "
            "to set the optimal spectra, but results in a much better initial log-likelihood "
            "and reduced total runtime, with convergence in fewer iterations. "
            "The improved initialization is only used when "
            "peaks * area < `maxSpectrumCutoff`.")

    # Mask-plane restrictions
    badMask = pexConfig.ListField(
        dtype=str, default=["BAD", "CR", "NO_DATA", "SAT", "SUSPECT", "EDGE"],
        doc="Mask planes that indicate bad pixels; "
            "pixels flagged with any of these planes are given zero weight")
    statsMask = pexConfig.ListField(dtype=str, default=["SAT", "INTRP", "NO_DATA"],
                                    doc="Mask planes to ignore when performing statistics")
    maskLimits = pexConfig.DictField(
        keytype=str,
        itemtype=float,
        default={},
        doc=("Mask planes with the corresponding limit on the fraction of masked pixels. "
             "Sources violating this limit will not be deblended."),
    )

    # Size restrictions
    maxNumberOfPeaks = pexConfig.Field(
        dtype=int, default=0,
        doc=("Only deblend the brightest maxNumberOfPeaks peaks in the parent"
             " (<= 0: unlimited)"))
    maxFootprintArea = pexConfig.Field(
        dtype=int, default=1000000,
        doc=("Maximum area for footprints before they are ignored as large; "
             "non-positive means no threshold applied"))
    maxFootprintSize = pexConfig.Field(
        dtype=int, default=0,
        doc=("Maximum linear dimension for footprints before they are ignored "
             "as large; non-positive means no threshold applied"))
    minFootprintAxisRatio = pexConfig.Field(
        dtype=float, default=0.0,
        doc=("Minimum axis ratio for footprints before they are ignored "
             "as large; non-positive means no threshold applied"))
    maxSpectrumCutoff = pexConfig.Field(
        dtype=int, default=1000000,
        doc=("Maximum number of pixels * number of sources in a blend. "
             "This is different than `maxFootprintArea` because this isn't "
             "the footprint area but the area of the bounding box that "
             "contains the footprint, and is also multiplied by the number of "
             "sources in the footprint. This prevents large skinny blends with "
             "a high density of sources from running out of memory. "
             "If `maxSpectrumCutoff <= 0` then there is no cutoff.")
    )

    # Failure modes
    fallback = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to fall back to a smaller number of components "
            "if a source does not initialize"
    )
    notDeblendedMask = pexConfig.Field(
        dtype=str, default="NOT_DEBLENDED", optional=True,
        doc="Mask name for footprints not deblended, or None")
    catchFailures = pexConfig.Field(
        dtype=bool, default=True,
        doc=("If True, catch exceptions thrown by the deblender, log them, "
             "and set a flag on the parent, instead of letting them propagate up"))


class ScarletDeblendTask(pipeBase.Task):
    """ScarletDeblendTask

    Split blended sources into individual sources.

    This task has no return value; it only modifies the SourceCatalog in-place.
    """
    ConfigClass = ScarletDeblendConfig
    _DefaultName = "scarletDeblend"

    def __init__(self, schema, peakSchema=None, **kwargs):
        """Create the task, adding necessary fields to the given schema.

        Parameters
        ----------
        schema : `lsst.afw.table.schema.schema.Schema`
            Schema object for measurement fields; will be modified in-place.
        peakSchema : `lsst.afw.table.schema.schema.Schema`
            Schema of Footprint Peaks that will be passed to the deblender.
            Any fields beyond the PeakTable minimal schema will be transferred
            to the main source Schema. If None, no fields will be transferred
            from the Peaks.
        **kwargs
            Passed to Task.__init__.
        """
        pipeBase.Task.__init__(self, **kwargs)

        peakMinimalSchema = afwDet.PeakTable.makeMinimalSchema()
        if peakSchema is None:
            # In this case, the peakSchemaMapper will transfer nothing, but
            # we'll still have one to simplify downstream code
            self.peakSchemaMapper = afwTable.SchemaMapper(peakMinimalSchema, schema)
        else:
            self.peakSchemaMapper = afwTable.SchemaMapper(peakSchema, schema)
            for item in peakSchema:
                if item.key not in peakMinimalSchema:
                    self.peakSchemaMapper.addMapping(item.key, item.field)
                    # Because SchemaMapper makes a copy of the output schema
                    # you give its ctor, it isn't updating this Schema in
                    # place. That's probably a design flaw, but in the
                    # meantime, we'll keep that schema in sync with the
                    # peakSchemaMapper.getOutputSchema() manually, by adding
                    # the same fields to both.
                    schema.addField(item.field)
            assert schema == self.peakSchemaMapper.getOutputSchema(), "Logic bug mapping schemas"
        self._addSchemaKeys(schema)
        self.schema = schema
        self.toCopyFromParent = [item.key for item in self.schema
                                 if item.field.getName().startswith("merge_footprint")]

    def _addSchemaKeys(self, schema):
        """Add deblender specific keys to the schema
        """
        self.runtimeKey = schema.addField('deblend_runtime', type=np.float32, doc='runtime in ms')

        self.iterKey = schema.addField('deblend_iterations', type=np.int32, doc='iterations to converge')

        self.nChildKey = schema.addField('deblend_nChild', type=np.int32,
                                         doc='Number of children this object has (defaults to 0)')
        self.psfKey = schema.addField('deblend_deblendedAsPsf', type='Flag',
                                      doc='Deblender thought this source looked like a PSF')
        self.tooManyPeaksKey = schema.addField('deblend_tooManyPeaks', type='Flag',
                                               doc='Source had too many peaks; '
                                                   'only the brightest were included')
        self.tooBigKey = schema.addField('deblend_parentTooBig', type='Flag',
                                         doc='Parent footprint covered too many pixels')
        self.maskedKey = schema.addField('deblend_masked', type='Flag',
                                         doc='Parent footprint was predominantly masked')
        self.sedNotConvergedKey = schema.addField('deblend_sedConvergenceFailed', type='Flag',
                                                  doc='scarlet sed optimization did not converge before '
                                                      'config.maxIter')
        self.morphNotConvergedKey = schema.addField('deblend_morphConvergenceFailed', type='Flag',
                                                    doc='scarlet morph optimization did not converge before '
                                                        'config.maxIter')
        self.blendConvergenceFailedFlagKey = schema.addField('deblend_blendConvergenceFailedFlag',
                                                             type='Flag',
                                                             doc='at least one source in the blend '
                                                                 'failed to converge')
        self.edgePixelsKey = schema.addField('deblend_edgePixels', type='Flag',
                                             doc='Source had flux on the edge of the parent footprint')
        self.deblendFailedKey = schema.addField('deblend_failed', type='Flag',
                                                doc="Deblending failed on source")
        self.deblendErrorKey = schema.addField('deblend_error', type="String", size=25,
                                               doc='Name of error if the blend failed')
        self.deblendSkippedKey = schema.addField('deblend_skipped', type='Flag',
                                                 doc="Deblender skipped this source")
        self.peakCenter = afwTable.Point2IKey.addFields(schema, name="deblend_peak_center",
                                                        doc="Center used to apply constraints in scarlet",
                                                        unit="pixel")
        self.peakIdKey = schema.addField("deblend_peakId", type=np.int32,
                                         doc="ID of the peak in the parent footprint. "
                                             "This is not unique, but the combination of 'parent' "
                                             "and 'peakId' should be for all child sources. "
                                             "Top level blends with no parents have 'peakId=0'")
        self.modelCenterFlux = schema.addField('deblend_peak_instFlux', type=float, units='count',
                                               doc="The instFlux at the peak position of the "
                                                   "deblended model")
        self.modelTypeKey = schema.addField("deblend_modelType", type="String", size=25,
                                            doc="The type of model used, for example "
                                                "MultiExtendedSource, SingleExtendedSource, PointSource")
        self.nPeaksKey = schema.addField("deblend_nPeaks", type=np.int32,
                                         doc="Number of initial peaks in the blend. "
                                             "This includes peaks that may have been culled "
                                             "during deblending or failed to deblend")
        self.parentNPeaksKey = schema.addField("deblend_parentNPeaks", type=np.int32,
                                               doc="Same as deblend_nPeaks, but the number of peaks "
                                                   "in the parent footprint")
        self.scarletFluxKey = schema.addField("deblend_scarletFlux", type=np.float32,
                                              doc="Flux measurement from scarlet")
        self.scarletLogLKey = schema.addField("deblend_logL", type=np.float32,
                                              doc="Final logL, used to identify regressions in scarlet.")
        self.scarletSpectrumInitKey = schema.addField("deblend_spectrumInitFlag", type='Flag',
                                                      doc="True when scarlet initializes sources "
                                                          "in the blend with a more accurate spectrum. "
                                                          "The algorithm uses a lot of memory, "
                                                          "so large dense blends will use "
                                                          "a less accurate initialization.")

        # self.log.trace('Added keys to schema: %s', ", ".join(str(x) for x in
        #                (self.nChildKey, self.tooManyPeaksKey, self.tooBigKey))
        #                )

    @pipeBase.timeMethod
    def run(self, mExposure, mergedSources):
        """Get the psf from each exposure and then run deblend().

        Parameters
        ----------
        mExposure : `MultibandExposure`
            The exposures should be co-added images of the same
            shape and region of the sky.
        mergedSources : `SourceCatalog`
            The merged `SourceCatalog` that contains parent footprints
            to (potentially) deblend.

        Returns
        -------
        templateCatalogs : dict
            Keys are the names of the filters and the values are
            `lsst.afw.table.source.source.SourceCatalog`'s.
            These are catalogs with heavy footprints that contain the
            templates created by the multiband deblender.
        """
        return self.deblend(mExposure, mergedSources)

    @pipeBase.timeMethod
    def deblend(self, mExposure, sources):
        """Deblend a data cube of multiband images

        Parameters
        ----------
        mExposure : `MultibandExposure`
            The exposures should be co-added images of the same
            shape and region of the sky.
        sources : `SourceCatalog`
            The merged `SourceCatalog` that contains parent footprints
            to (potentially) deblend.

        Returns
        -------
        templateCatalogs : dict
            Keys are the names of the filters and the values are
            `lsst.afw.table.source.source.SourceCatalog`'s.
            These are catalogs with heavy footprints that contain the
            templates created by the multiband deblender.
        """
        import time

        filters = mExposure.filters
        self.log.info("Deblending {0} sources in {1} exposure bands".format(len(sources), len(mExposure)))

        # Create the output catalogs
        templateCatalogs = {}
        for f in filters:
            _catalog = afwTable.SourceCatalog(sources.table.clone())
            _catalog.extend(sources)
            templateCatalogs[f] = _catalog

        n0 = len(sources)
        nparents = 0
        for pk, src in enumerate(sources):
            foot = src.getFootprint()
            bbox = foot.getBBox()
            peaks = foot.getPeaks()

            # Since we use the first peak for the parent object, we should
            # propagate its flags to the parent source.
            src.assign(peaks[0], self.peakSchemaMapper)

            # Block of skipping conditions
            if len(peaks) < 2 and not self.config.processSingles:
                for f in filters:
                    templateCatalogs[f][pk].set(self.runtimeKey, 0)
                continue
            if self._isLargeFootprint(foot):
                src.set(self.tooBigKey, True)
                self._skipParent(src, mExposure.mask)
                self.log.trace('Parent %i: skipping large footprint', int(src.getId()))
                continue
            if self._isMasked(foot, mExposure):
                src.set(self.maskedKey, True)
                mask = np.bitwise_or.reduce(mExposure.mask[:, bbox].array, axis=0)
                mask = afwImage.MaskX(mask, xy0=bbox.getMin())
                # _skipParent expects one mask per band, so pass a list
                self._skipParent(src, [mask])
                self.log.trace('Parent %i: skipping masked footprint', int(src.getId()))
                continue
            if self.config.maxNumberOfPeaks > 0 and len(peaks) > self.config.maxNumberOfPeaks:
                src.set(self.tooManyPeaksKey, True)
                self._skipParent(src, mExposure.mask)
                msg = 'Parent {0}: Too many peaks, skipping blend'
                self.log.trace(msg.format(int(src.getId())))
                # Unlike meas_deblender, in scarlet we skip the entire blend
                # if the number of peaks exceeds max peaks, since neglecting
                # to model any peaks often results in catastrophic failure
                # of scarlet to generate models for the brighter sources.
                continue

            nparents += 1
            self.log.trace('Parent %i: deblending %i peaks', int(src.getId()), len(peaks))
            # Run the deblender
            blendError = None
            try:
                t0 = time.time()
                # Build the parameter lists with the same ordering
                blend, skipped, spectrumInit = deblend(mExposure, foot, self.config)
                tf = time.time()
                runtime = (tf-t0)*1000
                src.set(self.deblendFailedKey, False)
                src.set(self.runtimeKey, runtime)
                src.set(self.scarletSpectrumInitKey, spectrumInit)
                converged = _checkBlendConvergence(blend, self.config.relativeError)
                # The flag records a convergence *failure*
                src.set(self.blendConvergenceFailedFlagKey, not converged)
                sources = list(blend.sources)
                # Re-insert place holders for skipped sources
                # to propagate them in the catalog so
                # that the peaks stay consistent
                for k in skipped:
                    sources.insert(k, None)
            # Catch all errors and filter out the ones that we know about
            except Exception as e:
                blendError = type(e).__name__
                if isinstance(e, ScarletGradientError):
                    src.set(self.iterKey, e.iterations)
                elif not isinstance(e, IncompleteDataError):
                    blendError = "UnknownError"
                    self._skipParent(src, mExposure.mask)
                    if self.config.catchFailures:
                        # Make it easy to find UnknownErrors in the log file
                        self.log.warn("UnknownError")
                        import traceback
                        traceback.print_exc()
                    else:
                        raise

                self.log.warn("Unable to deblend source %d: %s" % (src.getId(), blendError))
                src.set(self.deblendFailedKey, True)
                src.set(self.deblendErrorKey, blendError)
                self._skipParent(src, mExposure.mask)
                continue

            # Add the merged source as a parent in the catalog for each band
            templateParents = {}
            parentId = src.getId()
            for f in filters:
                templateParents[f] = templateCatalogs[f][pk]
                templateParents[f].set(self.nPeaksKey, len(foot.peaks))
                templateParents[f].set(self.runtimeKey, runtime)
                templateParents[f].set(self.iterKey, len(blend.loss))
                logL = blend.loss[-1]-blend.observations[0].log_norm
                templateParents[f].set(self.scarletLogLKey, logL)

            # Add each source to the catalogs in each band
            nchild = 0
            for k, source in enumerate(sources):
                # Skip any sources with no flux or that scarlet skipped because
                # it could not initialize
                if k in skipped:
                    # No need to propagate anything
                    continue
                src.set(self.deblendSkippedKey, False)
                models = modelToHeavy(source, filters, xy0=bbox.getMin(),
                                      observation=blend.observations[0])

                flux = scarlet.measure.flux(source)
                for fidx, f in enumerate(filters):
                    if len(models[f].getPeaks()) != 1:
                        err = "Heavy footprint should have a single peak, got {0}"
                        raise ValueError(err.format(len(models[f].getPeaks())))
                    cat = templateCatalogs[f]
                    child = self._addChild(src, cat, models[f], source, converged,
                                           xy0=bbox.getMin(), flux=flux[fidx])
                    if parentId == 0:
                        child.setId(src.getId())
                    child.set(self.runtimeKey, runtime)
                nchild += 1

            # Set the number of children for each parent
            for f in filters:
                templateParents[f].set(self.nChildKey, nchild)

        K = len(list(templateCatalogs.values())[0])
        self.log.info('Deblended: of %i sources, %i were deblended, creating %i children, total %i sources'
                      % (n0, nparents, K-n0, K))
        return templateCatalogs

    def _isLargeFootprint(self, footprint):
        """Returns whether a Footprint is large

        'Large' is defined by thresholds on the area, size and axis ratio.
        These may be disabled independently by configuring them to be
        non-positive.

        This is principally intended to get rid of satellite streaks, which the
        deblender or other downstream processing can have trouble dealing with
        (e.g., multiple large HeavyFootprints can chew up memory).
        """
        if self.config.maxFootprintArea > 0 and footprint.getArea() > self.config.maxFootprintArea:
            return True
        if self.config.maxFootprintSize > 0:
            bbox = footprint.getBBox()
            if max(bbox.getWidth(), bbox.getHeight()) > self.config.maxFootprintSize:
                return True
        if self.config.minFootprintAxisRatio > 0:
            axes = afwEll.Axes(footprint.getShape())
            if axes.getB() < self.config.minFootprintAxisRatio*axes.getA():
                return True
        return False
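
    # Illustrative note on _isLargeFootprint: with a hypothetical
    # (non-default) minFootprintAxisRatio of 0.1, a satellite-streak
    # footprint whose shape has semi-minor axis b = 5 pixels and
    # semi-major axis a = 400 pixels is flagged as large, since
    # 5 < 0.1 * 400.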

    def _isMasked(self, footprint, mExposure):
        """Returns whether the footprint violates the mask limits"""
        bbox = footprint.getBBox()
        mask = np.bitwise_or.reduce(mExposure.mask[:, bbox].array, axis=0)
        size = float(footprint.getArea())
        for maskName, limit in self.config.maskLimits.items():
            maskVal = mExposure.mask.getPlaneBitMask(maskName)
            _mask = afwImage.MaskX(mask & maskVal, xy0=bbox.getMin())
            # spanset of unmasked pixels
            unmaskedSpan = footprint.spans.intersectNot(_mask)
            if (size - unmaskedSpan.getArea())/size > limit:
                return True
        return False
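
    # For example, a hypothetical config with maskLimits={"NO_DATA": 0.25}
    # makes _isMasked return True for any parent footprint in which more
    # than 25% of the pixels are flagged NO_DATA in the combined
    # multiband mask.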

    def _skipParent(self, source, masks):
        """Indicate that the parent source is not being deblended

        We set the appropriate flags and masks for each exposure.

        Parameters
        ----------
        source : `lsst.afw.table.source.source.SourceRecord`
            The source to flag as skipped
        masks : list of `lsst.afw.image.MaskX`
            The mask in each band to update with the non-detection
        """
        fp = source.getFootprint()
        source.set(self.deblendSkippedKey, True)
        if self.config.notDeblendedMask:
            for mask in masks:
                mask.addMaskPlane(self.config.notDeblendedMask)
                fp.spans.setMask(mask, mask.getPlaneBitMask(self.config.notDeblendedMask))
        # The deblender didn't run on this source, so it has zero runtime
        source.set(self.runtimeKey, 0)
        # Set the center of the parent
        bbox = fp.getBBox()
        centerX = int(bbox.getMinX()+bbox.getWidth()/2)
        centerY = int(bbox.getMinY()+bbox.getHeight()/2)
        source.set(self.peakCenter, Point2I(centerX, centerY))
        # There are no deblended children, so nChild = 0
        source.set(self.nChildKey, 0)
        # But we also want to know how many peaks that we would have
        # deblended if the parent wasn't skipped.
        source.set(self.nPeaksKey, len(fp.peaks))
        # The blend was skipped, so it didn't take any iterations
        source.set(self.iterKey, 0)
        # Top level parents are not a detected peak, so they have no peakId
        source.set(self.peakIdKey, 0)
        # Top level parents also have no parentNPeaks
        source.set(self.parentNPeaksKey, 0)

    def _addChild(self, parent, sources, heavy, scarletSource, blend_converged, xy0, flux):
        """Add a child to a catalog

        This creates a new child in the source catalog,
        assigning it a parent id, adding a footprint,
        and setting all appropriate flags based on the
        deblender result.
        """
        assert len(heavy.getPeaks()) == 1
        src = sources.addNew()
        for key in self.toCopyFromParent:
            src.set(key, parent.get(key))
        src.assign(heavy.getPeaks()[0], self.peakSchemaMapper)
        src.setParent(parent.getId())
        src.setFootprint(heavy)
        # Set the psf key based on whether or not the source was
        # deblended using the PointSource model.
        # This key is not that useful anymore since we now keep track of
        # `modelType`, but we continue to propagate it in case code downstream
        # is expecting it.
        src.set(self.psfKey, scarletSource.__class__.__name__ == "PointSource")
        src.set(self.runtimeKey, 0)
        src.set(self.blendConvergenceFailedFlagKey, not blend_converged)

        # Set the position of the peak from the parent footprint.
        # This will make it easier to match the same source across
        # deblenders and across observations, where the peak
        # position is unlikely to change unless enough time passes
        # for a source to move on the sky.
        peak = scarletSource.detectedPeak
        src.set(self.peakCenter, Point2I(peak["i_x"], peak["i_y"]))
        src.set(self.peakIdKey, peak["id"])

        # The children have a single peak
        src.set(self.nPeaksKey, 1)

        # Store the flux at the center of the model and the total
        # scarlet flux measurement.
        morph = afwDet.multiband.heavyFootprintToImage(heavy).image.array

        # Set the flux at the center of the model (for SNR)
        try:
            # Clip the center to the bounds of the morphology array
            cy, cx = scarletSource.center
            cy = np.max([np.min([int(np.round(cy)), morph.shape[0]-1]), 0])
            cx = np.max([np.min([int(np.round(cx)), morph.shape[1]-1]), 0])
            src.set(self.modelCenterFlux, morph[cy, cx])
        except AttributeError:
            msg = "Did not recognize coordinates for source type of `{0}`, "
            msg += "could not write coordinates or center flux. "
            msg += "Add `{0}` to meas_extensions_scarlet to properly persist this information."
            logger.warning(msg.format(type(scarletSource)))

        src.set(self.modelTypeKey, scarletSource.__class__.__name__)
        # Include the source flux in the model space in the catalog.
        # This uses the narrower model PSF, which ensures that all sources
        # not located on an edge have all of their flux included in the
        # measurement.
        src.set(self.scarletFluxKey, flux)

        # Set the spectrum init flag from the parent
        src.set(self.scarletSpectrumInitKey, parent.get(self.scarletSpectrumInitKey))
        return src