Coverage for python/lsst/meas/extensions/scarlet/scarletDeblendTask.py : 14%

# This file is part of meas_extensions_scarlet.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

import numpy as np
import scarlet
from scarlet.psf import ImagePSF, GaussianPSF
from scarlet import Blend, Frame, Observation
from scarlet.renderer import ConvolutionRenderer
from scarlet.initialization import init_all_sources

import lsst.log
import lsst.pex.config as pexConfig
from lsst.pex.exceptions import InvalidParameterError
import lsst.pipe.base as pipeBase
from lsst.geom import Point2I, Box2I, Point2D
import lsst.afw.geom.ellipses as afwEll
import lsst.afw.image.utils
import lsst.afw.image as afwImage
import lsst.afw.detection as afwDet
import lsst.afw.table as afwTable

from .source import modelToHeavy

__all__ = ["deblend", "ScarletDeblendConfig", "ScarletDeblendTask"]

logger = lsst.log.Log.getLogger("meas.deblender.deblend")


class IncompleteDataError(Exception):
    """The PSF could not be computed due to incomplete data
    """
    pass


class ScarletGradientError(Exception):
    """An error occurred during optimization

    This error occurs when the optimizer encounters
    a NaN value while calculating the gradient.
    """
    def __init__(self, iterations, sources):
        self.iterations = iterations
        self.sources = sources
        msg = ("ScarletGradientError in iteration {0}. "
               "NaN values introduced in sources {1}")
        self.message = msg.format(iterations, sources)

    def __str__(self):
        return self.message


def _checkBlendConvergence(blend, f_rel):
    """Check whether or not a blend has converged
    """
    deltaLoss = np.abs(blend.loss[-2] - blend.loss[-1])
    convergence = f_rel * np.abs(blend.loss[-1])
    return deltaLoss < convergence
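

# Illustrative sketch (not part of the module): the test above is a relative
# tolerance on the last step of the loss history. With a stand-in blend whose
# ``loss`` ends in [1000.0, 999.9] and f_rel=1e-4, the change (~0.1) is
# compared against 1e-4 * 999.9 ~= 0.09999, so the blend is judged not yet
# converged:
#
#     class FakeBlend:
#         loss = [1000.0, 999.9]
#     _checkBlendConvergence(FakeBlend(), f_rel=1e-4)  # -> False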


def _getPsfFwhm(psf):
    """Calculate the FWHM of the `psf`

    The determinant radius is converted to a FWHM using the Gaussian
    relation FWHM = 2*sqrt(2*ln(2)) * sigma ~= 2.35 * sigma.
    """
    return psf.computeShape().getDeterminantRadius() * 2.35
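

# Hedged aside: the constant 2.35 approximates the Gaussian FWHM factor
# 2*sqrt(2*ln(2)), e.g.
#
#     import math
#     2 * math.sqrt(2 * math.log(2))  # -> 2.3548...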


def _computePsfImage(self, position=None):
    """Get a multiband PSF image

    The PSF Kernel Image is computed for each band
    and combined into a (filter, y, x) array.
    The result is not cached, so if the same PSF is expected
    to be used multiple times it is a good idea to store the
    result in another variable.

    Note: this is a temporary fix during the deblender sprint.
    In the future this function will replace the current method
    in `afw.MultibandExposure.computePsfImage` (DM-19789).

    Parameters
    ----------
    position : `Point2D` or `tuple`
        Coordinates to evaluate the PSF. If `position` is `None`
        then `Psf.getAveragePosition()` is used.

    Returns
    -------
    psfImage : array
        The multiband PSF image.
    """
    psfs = []
    # Make the coordinates into a Point2D (if necessary)
    if not isinstance(position, Point2D) and position is not None:
        position = Point2D(position[0], position[1])

    for bidx, single in enumerate(self.singles):
        try:
            if position is None:
                psf = single.getPsf().computeImage()
                psfs.append(psf)
            else:
                psf = single.getPsf().computeKernelImage(position)
                psfs.append(psf)
        except InvalidParameterError:
            # This band failed to compute the PSF due to incomplete data
            # at that location. This is unlikely to be a problem for Rubin,
            # however the edges of some HSC COSMOS fields contain incomplete
            # data in some bands, so we track this error to distinguish it
            # from unknown errors.
            msg = "Failed to compute PSF at {} in band {}"
            raise IncompleteDataError(msg.format(position, self.filters[bidx]))

    left = np.min([psf.getBBox().getMinX() for psf in psfs])
    bottom = np.min([psf.getBBox().getMinY() for psf in psfs])
    right = np.max([psf.getBBox().getMaxX() for psf in psfs])
    top = np.max([psf.getBBox().getMaxY() for psf in psfs])
    bbox = Box2I(Point2I(left, bottom), Point2I(right, top))
    psfs = [afwImage.utils.projectImage(psf, bbox) for psf in psfs]
    psfImage = afwImage.MultibandImage.fromImages(self.filters, psfs)
    return psfImage
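

# Usage sketch (illustrative; ``mExposure`` is an assumed MultibandExposure):
# the function is written to be bound to `afw.MultibandExposure` later
# (DM-19789), so for now the exposure is passed explicitly as ``self``, e.g.
#
#     psfImage = _computePsfImage(mExposure, Point2D(1024.5, 768.0))
#     psfCube = psfImage.array.astype(np.float32)  # (band, y, x) cube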


def getFootprintMask(footprint, mExposure):
    """Mask pixels outside the footprint

    Parameters
    ----------
    footprint : `lsst.afw.detection.Footprint`
        The footprint of the parent to deblend
    mExposure : `lsst.afw.image.MultibandExposure`
        The multiband exposure containing the image,
        mask, and variance data

    Returns
    -------
    footprintMask : array
        Boolean array with pixels not in the footprint set to `True`.
    """
    bbox = footprint.getBBox()
    fpMask = afwImage.Mask(bbox)
    footprint.spans.setMask(fpMask, 1)
    fpMask = ~fpMask.getArray().astype(bool)
    return fpMask
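

# Sketch (illustrative): ``deblend`` below uses the boolean mask to zero the
# deblender weights outside the parent footprint, e.g.
#
#     fpMask = getFootprintMask(footprint, mExposure)
#     weights = weights * ~fpMask  # (band, y, x) weights, zeroed off-footprint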


def deblend(mExposure, footprint, config):
    """Deblend a parent footprint

    Parameters
    ----------
    mExposure : `lsst.afw.image.MultibandExposure`
        The multiband exposure containing the image,
        mask, and variance data
    footprint : `lsst.afw.detection.Footprint`
        The footprint of the parent to deblend
    config : `ScarletDeblendConfig`
        Configuration of the deblending task

    Returns
    -------
    blend : `scarlet.Blend`
        The fitted blend containing the deblended sources
    skipped : list of int
        Indices of the peaks that could not be initialized
    """
    # Extract coordinates from each MultiColorPeak
    bbox = footprint.getBBox()

    # Create the data array from the masked images
    images = mExposure.image[:, bbox].array

    # Use the inverse variance as the weights
    if config.useWeights:
        weights = 1/mExposure.variance[:, bbox].array
    else:
        weights = np.ones_like(images)
    badPixels = mExposure.mask.getPlaneBitMask(config.badMask)
    mask = mExposure.mask[:, bbox].array & badPixels
    weights[mask > 0] = 0

    # Mask out the pixels outside the footprint
    mask = getFootprintMask(footprint, mExposure)
    weights *= ~mask

    psfs = _computePsfImage(mExposure, footprint.getCentroid()).array.astype(np.float32)
    psfs = ImagePSF(psfs)
    model_psf = GaussianPSF(sigma=(config.modelPsfSigma,)*len(mExposure.filters))

    frame = Frame(images.shape, psf=model_psf, channels=mExposure.filters)
    observation = Observation(images, psf=psfs, weights=weights, channels=mExposure.filters)
    if config.convolutionType == "fft":
        observation.match(frame)
    elif config.convolutionType == "real":
        renderer = ConvolutionRenderer(observation, frame, convolution_type="real")
        observation.match(frame, renderer=renderer)
    else:
        raise ValueError("Unrecognized convolution type {}".format(config.convolutionType))

    assert config.sourceModel in ["single", "double", "compact", "point", "fit"]

    # Set the appropriate number of components
    if config.sourceModel == "single":
        maxComponents = 1
    elif config.sourceModel == "double":
        maxComponents = 2
    elif config.sourceModel == "compact":
        maxComponents = 0
    elif config.sourceModel == "point":
        raise NotImplementedError("Point source photometry is currently not implemented")
    elif config.sourceModel == "fit":
        # It is likely in the future that there will be some heuristic
        # used to determine what type of model to use for each source,
        # but that has not yet been implemented (see DM-22551)
        raise NotImplementedError("sourceModel 'fit' has not been implemented yet")

    # Convert the centers to pixel coordinates
    xmin = bbox.getMinX()
    ymin = bbox.getMinY()
    centers = [np.array([peak.getIy()-ymin, peak.getIx()-xmin], dtype=int) for peak in footprint.peaks]

    # Only deblend sources that can be initialized
    sources, skipped = init_all_sources(
        frame=frame,
        centers=centers,
        observations=observation,
        thresh=config.morphThresh,
        max_components=maxComponents,
        min_snr=config.minSNR,
        shifting=False,
        fallback=config.fallback,
        silent=config.catchFailures,
        set_spectra=config.setSpectra,
    )

    # Attach the peak to all of the initialized sources
    srcIndex = 0
    for k, center in enumerate(centers):
        if k not in skipped:
            # This is just to make sure that there isn't a coding bug
            assert np.all(sources[srcIndex].center == center)
            # Store the record for the peak with the appropriate source
            sources[srcIndex].detectedPeak = footprint.peaks[k]
            srcIndex += 1

    # Create the blend and attempt to optimize it
    blend = Blend(sources, observation)
    try:
        blend.fit(max_iter=config.maxIter, e_rel=config.relativeError)
    except ArithmeticError:
        # This occurs when a gradient update produces a NaN value.
        # It is usually due to a source initialized with a
        # negative SED or no flux, often because the peak
        # is a noise fluctuation in one band and not a real source.
        iterations = len(blend.loss)
        failedSources = []
        for k, src in enumerate(sources):
            if np.any(~np.isfinite(src.get_model())):
                failedSources.append(k)
        raise ScarletGradientError(iterations, failedSources)

    return blend, skipped
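

# Usage sketch (illustrative; ``mExposure`` and ``parentRecord`` are assumed
# inputs, not constructed here):
#
#     config = ScarletDeblendConfig()
#     config.maxIter = 200
#     try:
#         blend, skipped = deblend(mExposure, parentRecord.getFootprint(), config)
#     except ScarletGradientError as e:
#         print("NaN gradient after {} iterations".format(e.iterations))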


class ScarletDeblendConfig(pexConfig.Config):
    """ScarletDeblendConfig

    Configuration for the multiband deblender.
    The parameters are organized by parameter type:

    - Stopping Criteria: Used to determine if the fit has converged
    - Position Fitting Criteria: Used to fit the positions of the peaks
    - Constraints: Used to apply constraints to the peaks and their components
    - Other: Parameters that don't fit into the above categories
    """
    # Stopping Criteria
    maxIter = pexConfig.Field(dtype=int, default=300,
                              doc=("Maximum number of iterations to deblend a single parent"))
    relativeError = pexConfig.Field(dtype=float, default=1e-4,
                                    doc=("Change in the loss function between "
                                         "iterations to exit fitter"))

    # Constraints
    morphThresh = pexConfig.Field(dtype=float, default=1,
                                  doc="Fraction of background RMS a pixel must have "
                                      "to be included in the initial morphology")
    # Other scarlet parameters
    useWeights = pexConfig.Field(
        dtype=bool, default=True,
        doc=("Whether or not to use inverse variance weighting. "
             "If `useWeights` is `False` then flat weights are used"))
    modelPsfSize = pexConfig.Field(
        dtype=int, default=11,
        doc="Model PSF side length in pixels")
    modelPsfSigma = pexConfig.Field(
        dtype=float, default=0.8,
        doc="Define sigma for the model frame PSF")
    minSNR = pexConfig.Field(
        dtype=float, default=50,
        doc="Minimum signal to noise to accept the source. "
            "Sources with lower flux will be initialized with the PSF but updated "
            "like an ordinary ExtendedSource (known in scarlet as a `CompactSource`).")
    saveTemplates = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to save the SEDs and templates")
    processSingles = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to process isolated sources in the deblender")
    convolutionType = pexConfig.Field(
        dtype=str, default="fft",
        doc="Type of convolution to render the model to the observations.\n"
            "- 'fft': perform convolutions in Fourier space\n"
            "- 'real': perform convolutions in real space.")
    sourceModel = pexConfig.Field(
        dtype=str, default="double",
        doc=("How to determine which model to use for sources, from\n"
             "- 'single': use a single component for all sources\n"
             "- 'double': use a bulge disk model for all sources\n"
             "- 'compact': use a single component model, initialized with a point source morphology, "
             "for all sources\n"
             "- 'point': use a point-source model for all sources\n"
             "- 'fit': use a PSF fitting model to determine the number of components "
             "(not yet implemented)")
    )
    setSpectra = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to solve for the best-fit spectra during initialization. "
            "This makes initialization slightly longer, as it requires a convolution "
            "to set the optimal spectra, but results in a much better initial log-likelihood "
            "and reduced total runtime, with convergence in fewer iterations.")

    # Mask-plane restrictions
    badMask = pexConfig.ListField(
        dtype=str, default=["BAD", "CR", "NO_DATA", "SAT", "SUSPECT", "EDGE"],
        doc="Mask planes with pixels to ignore when building the deblender weights")
    statsMask = pexConfig.ListField(dtype=str, default=["SAT", "INTRP", "NO_DATA"],
                                    doc="Mask planes to ignore when performing statistics")
    maskLimits = pexConfig.DictField(
        keytype=str,
        itemtype=float,
        default={},
        doc=("Mask planes with the corresponding limit on the fraction of masked pixels. "
             "Sources violating this limit will not be deblended."),
    )

    # Size restrictions
    maxNumberOfPeaks = pexConfig.Field(
        dtype=int, default=0,
        doc=("Only deblend the brightest maxNumberOfPeaks peaks in the parent"
             " (<= 0: unlimited)"))
    maxFootprintArea = pexConfig.Field(
        dtype=int, default=1000000,
        doc=("Maximum area for footprints before they are ignored as large; "
             "non-positive means no threshold applied"))
    maxFootprintSize = pexConfig.Field(
        dtype=int, default=0,
        doc=("Maximum linear dimension for footprints before they are ignored "
             "as large; non-positive means no threshold applied"))
    minFootprintAxisRatio = pexConfig.Field(
        dtype=float, default=0.0,
        doc=("Minimum axis ratio for footprints before they are ignored "
             "as large; non-positive means no threshold applied"))

    # Failure modes
    fallback = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to fallback to a smaller number of components if a source "
            "does not initialize"
    )
    notDeblendedMask = pexConfig.Field(
        dtype=str, default="NOT_DEBLENDED", optional=True,
        doc="Mask name for footprints not deblended, or None")
    catchFailures = pexConfig.Field(
        dtype=bool, default=True,
        doc=("If True, catch exceptions thrown by the deblender, log them, "
             "and set a flag on the parent, instead of letting them propagate up"))


class ScarletDeblendTask(pipeBase.Task):
    """ScarletDeblendTask

    Split blended sources into individual sources.

    In addition to modifying the input `SourceCatalog` in-place, this task
    returns per-band template catalogs built from the scarlet models.
    """
    ConfigClass = ScarletDeblendConfig
    _DefaultName = "scarletDeblend"

    def __init__(self, schema, peakSchema=None, **kwargs):
        """Create the task, adding necessary fields to the given schema.

        Parameters
        ----------
        schema : `lsst.afw.table.schema.schema.Schema`
            Schema object for measurement fields; will be modified in-place.
        peakSchema : `lsst.afw.table.schema.schema.Schema`
            Schema of Footprint Peaks that will be passed to the deblender.
            Any fields beyond the PeakTable minimal schema will be transferred
            to the main source Schema. If None, no fields will be transferred
            from the Peaks.
        **kwargs
            Passed to Task.__init__.
        """
        pipeBase.Task.__init__(self, **kwargs)

        peakMinimalSchema = afwDet.PeakTable.makeMinimalSchema()
        if peakSchema is None:
            # In this case, the peakSchemaMapper will transfer nothing, but
            # we'll still have one to simplify downstream code
            self.peakSchemaMapper = afwTable.SchemaMapper(peakMinimalSchema, schema)
        else:
            self.peakSchemaMapper = afwTable.SchemaMapper(peakSchema, schema)
            for item in peakSchema:
                if item.key not in peakMinimalSchema:
                    self.peakSchemaMapper.addMapping(item.key, item.field)
                    # Because SchemaMapper makes a copy of the output schema
                    # you give its ctor, it isn't updating this Schema in
                    # place. That's probably a design flaw, but in the
                    # meantime, we'll keep that schema in sync with the
                    # peakSchemaMapper.getOutputSchema() manually, by adding
                    # the same fields to both.
                    schema.addField(item.field)
            assert schema == self.peakSchemaMapper.getOutputSchema(), "Logic bug mapping schemas"
        self._addSchemaKeys(schema)
        self.schema = schema
        self.toCopyFromParent = [item.key for item in self.schema
                                 if item.field.getName().startswith("merge_footprint")]

    def _addSchemaKeys(self, schema):
        """Add deblender specific keys to the schema
        """
        self.runtimeKey = schema.addField('deblend_runtime', type=np.float32, doc='runtime in ms')

        self.iterKey = schema.addField('deblend_iterations', type=np.int32, doc='iterations to converge')

        self.nChildKey = schema.addField('deblend_nChild', type=np.int32,
                                         doc='Number of children this object has (defaults to 0)')
        self.psfKey = schema.addField('deblend_deblendedAsPsf', type='Flag',
                                      doc='Deblender thought this source looked like a PSF')
        self.tooManyPeaksKey = schema.addField('deblend_tooManyPeaks', type='Flag',
                                               doc='Source had too many peaks; '
                                                   'only the brightest were included')
        self.tooBigKey = schema.addField('deblend_parentTooBig', type='Flag',
                                         doc='Parent footprint covered too many pixels')
        self.maskedKey = schema.addField('deblend_masked', type='Flag',
                                         doc='Parent footprint was predominantly masked')
        self.sedNotConvergedKey = schema.addField('deblend_sedConvergenceFailed', type='Flag',
                                                  doc='scarlet sed optimization did not converge before '
                                                      'config.maxIter')
        self.morphNotConvergedKey = schema.addField('deblend_morphConvergenceFailed', type='Flag',
                                                    doc='scarlet morph optimization did not converge '
                                                        'before config.maxIter')
        self.blendConvergenceFailedFlagKey = schema.addField('deblend_blendConvergenceFailedFlag',
                                                             type='Flag',
                                                             doc='at least one source in the blend '
                                                                 'failed to converge')
        self.edgePixelsKey = schema.addField('deblend_edgePixels', type='Flag',
                                             doc='Source had flux on the edge of the parent footprint')
        self.deblendFailedKey = schema.addField('deblend_failed', type='Flag',
                                                doc="Deblending failed on source")
        self.deblendErrorKey = schema.addField('deblend_error', type="String", size=25,
                                               doc='Name of error if the blend failed')
        self.deblendSkippedKey = schema.addField('deblend_skipped', type='Flag',
                                                 doc="Deblender skipped this source")
        self.peakCenter = afwTable.Point2IKey.addFields(schema, name="deblend_peak_center",
                                                        doc="Center used to apply constraints in scarlet",
                                                        unit="pixel")
        self.peakIdKey = schema.addField("deblend_peakId", type=np.int32,
                                         doc="ID of the peak in the parent footprint. "
                                             "This is not unique, but the combination of 'parent' "
                                             "and 'peakId' should be for all child sources. "
                                             "Top level blends with no parents have 'peakId=0'")
        self.modelCenterFlux = schema.addField('deblend_peak_instFlux', type=float, units='count',
                                               doc="The instFlux at the peak position of the "
                                                   "deblended model")
        self.modelTypeKey = schema.addField("deblend_modelType", type="String", size=25,
                                            doc="The type of model used, for example "
                                                "MultiExtendedSource, SingleExtendedSource, PointSource")
        self.nPeaksKey = schema.addField("deblend_nPeaks", type=np.int32,
                                         doc="Number of initial peaks in the blend. "
                                             "This includes peaks that may have been culled "
                                             "during deblending or failed to deblend")
        self.parentNPeaksKey = schema.addField("deblend_parentNPeaks", type=np.int32,
                                               doc="Same as deblend_nPeaks, but the number of peaks "
                                                   "in the parent footprint")
        self.scarletFluxKey = schema.addField("deblend_scarletFlux", type=np.float32,
                                              doc="Flux measurement from scarlet")
        self.scarletLogLKey = schema.addField("deblend_logL", type=np.float32,
                                              doc="Final logL, used to identify regressions in scarlet.")

        # self.log.trace('Added keys to schema: %s', ", ".join(str(x) for x in
        #                (self.nChildKey, self.tooManyPeaksKey, self.tooBigKey))
        #                )

    @pipeBase.timeMethod
    def run(self, mExposure, mergedSources):
        """Get the psf from each exposure and then run deblend().

        Parameters
        ----------
        mExposure : `MultibandExposure`
            The exposures should be co-added images of the same
            shape and region of the sky.
        mergedSources : `SourceCatalog`
            The merged `SourceCatalog` that contains parent footprints
            to (potentially) deblend.

        Returns
        -------
        templateCatalogs : dict
            Keys are the names of the filters and the values are
            `lsst.afw.table.source.source.SourceCatalog`'s.
            These are catalogs with heavy footprints that are the templates
            created by the multiband deblender.
        """
        return self.deblend(mExposure, mergedSources)
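
    # Usage sketch (illustrative; the exposure and catalog construction are
    # assumptions, not shown by this module):
    #
    #     schema = afwTable.SourceTable.makeMinimalSchema()
    #     task = ScarletDeblendTask(schema=schema)
    #     templateCatalogs = task.run(mExposure, mergedSources)
    #     gTemplates = templateCatalogs["g"]  # heavy footprints for one band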

    @pipeBase.timeMethod
    def deblend(self, mExposure, sources):
        """Deblend a data cube of multiband images

        Parameters
        ----------
        mExposure : `MultibandExposure`
            The exposures should be co-added images of the same
            shape and region of the sky.
        sources : `SourceCatalog`
            The merged `SourceCatalog` that contains parent footprints
            to (potentially) deblend.

        Returns
        -------
        templateCatalogs : dict
            Keys are the names of the filters and the values are
            `lsst.afw.table.source.source.SourceCatalog`'s.
            These are catalogs with heavy footprints that are the templates
            created by the multiband deblender.
        """
        import time

        filters = mExposure.filters
        self.log.info("Deblending {0} sources in {1} exposure bands".format(len(sources), len(mExposure)))

        # Create the output catalogs, seeded with a copy of the input sources
        templateCatalogs = {}
        for f in filters:
            _catalog = afwTable.SourceCatalog(sources.table.clone())
            _catalog.extend(sources)
            templateCatalogs[f] = _catalog

        n0 = len(sources)
        nparents = 0
        for pk, src in enumerate(sources):
            foot = src.getFootprint()
            bbox = foot.getBBox()
            peaks = foot.getPeaks()

            # Since we use the first peak for the parent object, we should
            # propagate its flags to the parent source.
            src.assign(peaks[0], self.peakSchemaMapper)

            # Block of skipping conditions
            if len(peaks) < 2 and not self.config.processSingles:
                for f in filters:
                    templateCatalogs[f][pk].set(self.runtimeKey, 0)
                continue
            if self._isLargeFootprint(foot):
                src.set(self.tooBigKey, True)
                self._skipParent(src, mExposure.mask)
                self.log.trace('Parent %i: skipping large footprint', int(src.getId()))
                continue
            if self._isMasked(foot, mExposure):
                src.set(self.maskedKey, True)
                mask = np.bitwise_or.reduce(mExposure.mask[:, bbox].array, axis=0)
                mask = afwImage.MaskX(mask, xy0=bbox.getMin())
                self._skipParent(src, mask)
                self.log.trace('Parent %i: skipping masked footprint', int(src.getId()))
                continue
            if self.config.maxNumberOfPeaks > 0 and len(peaks) > self.config.maxNumberOfPeaks:
                src.set(self.tooManyPeaksKey, True)
                self._skipParent(src, mExposure.mask)
                msg = 'Parent {0}: Too many peaks, skipping blend'
                self.log.trace(msg.format(int(src.getId())))
                # Unlike meas_deblender, in scarlet we skip the entire blend
                # if the number of peaks exceeds max peaks, since neglecting
                # to model any peaks often results in catastrophic failure
                # of scarlet to generate models for the brighter sources.
                continue

            nparents += 1
            self.log.trace('Parent %i: deblending %i peaks', int(src.getId()), len(peaks))
            # Run the deblender
            blendError = None
            try:
                t0 = time.time()
                # Build the parameter lists with the same ordering
                blend, skipped = deblend(mExposure, foot, self.config)
                tf = time.time()
                runtime = (tf-t0)*1000
                src.set(self.deblendFailedKey, False)
                src.set(self.runtimeKey, runtime)
                converged = _checkBlendConvergence(blend, self.config.relativeError)
                src.set(self.blendConvergenceFailedFlagKey, not converged)
                # Use a separate name for the scarlet model sources so the
                # input `sources` catalog is not shadowed
                scarletSources = list(blend.sources)
                # Re-insert place holders for skipped sources
                # to propagate them in the catalog so
                # that the peaks stay consistent
                for k in skipped:
                    scarletSources.insert(k, None)
            # Catch all errors and filter out the ones that we know about
            except Exception as e:
                blendError = type(e).__name__
                if isinstance(e, ScarletGradientError):
                    src.set(self.iterKey, e.iterations)
                elif not isinstance(e, IncompleteDataError):
                    blendError = "UnknownError"
                    self._skipParent(src, mExposure.mask)
                    if self.config.catchFailures:
                        # Make it easy to find UnknownErrors in the log file
                        self.log.warn("UnknownError")
                        import traceback
                        traceback.print_exc()
                    else:
                        raise

                self.log.warn("Unable to deblend source %d: %s" % (src.getId(), blendError))
                src.set(self.deblendFailedKey, True)
                src.set(self.deblendErrorKey, blendError)
                self._skipParent(src, mExposure.mask)
                continue

            # Add the merged source as a parent in the catalog for each band
            templateParents = {}
            parentId = src.getId()
            for f in filters:
                templateParents[f] = templateCatalogs[f][pk]
                templateParents[f].set(self.nPeaksKey, len(foot.peaks))
                templateParents[f].set(self.runtimeKey, runtime)
                templateParents[f].set(self.iterKey, len(blend.loss))
                logL = blend.loss[-1]-blend.observations[0].log_norm
                templateParents[f].set(self.scarletLogLKey, logL)

            # Add each source to the catalogs in each band
            nchild = 0
            for k, source in enumerate(scarletSources):
                # Skip any sources with no flux or that scarlet skipped because
                # it could not initialize
                if k in skipped:
                    # No need to propagate anything
                    continue
                else:
                    src.set(self.deblendSkippedKey, False)
                    models = modelToHeavy(source, filters, xy0=bbox.getMin(),
                                          observation=blend.observations[0])

                flux = scarlet.measure.flux(source)
                for fidx, f in enumerate(filters):
                    if len(models[f].getPeaks()) != 1:
                        err = "Heavy footprint should have a single peak, got {0}"
                        raise ValueError(err.format(len(models[f].getPeaks())))
                    cat = templateCatalogs[f]
                    child = self._addChild(src, cat, models[f], source, converged,
                                           xy0=bbox.getMin(), flux=flux[fidx])
                    if parentId == 0:
                        child.setId(src.getId())
                        child.set(self.runtimeKey, runtime)
                    nchild += 1

            # Set the number of children for each parent
            for f in filters:
                templateParents[f].set(self.nChildKey, nchild)

        K = len(list(templateCatalogs.values())[0])
        self.log.info('Deblended: of %i sources, %i were deblended, creating %i children, total %i sources'
                      % (n0, nparents, K-n0, K))
        return templateCatalogs

    def _isLargeFootprint(self, footprint):
        """Returns whether a Footprint is large

        'Large' is defined by thresholds on the area, size and axis ratio.
        These may be disabled independently by configuring them to be
        non-positive.

        This is principally intended to get rid of satellite streaks, which the
        deblender or other downstream processing can have trouble dealing with
        (e.g., multiple large HeavyFootprints can chew up memory).
        """
        if self.config.maxFootprintArea > 0 and footprint.getArea() > self.config.maxFootprintArea:
            return True
        if self.config.maxFootprintSize > 0:
            bbox = footprint.getBBox()
            if max(bbox.getWidth(), bbox.getHeight()) > self.config.maxFootprintSize:
                return True
        if self.config.minFootprintAxisRatio > 0:
            axes = afwEll.Axes(footprint.getShape())
            if axes.getB() < self.config.minFootprintAxisRatio*axes.getA():
                return True
        return False
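
    # Worked sketch (illustrative numbers): with the defaults above, a
    # satellite-trail footprint of roughly 1200x8 pixels passes the area test
    # (9600 < 1000000) and the size test is disabled (maxFootprintSize=0), but
    # setting minFootprintAxisRatio=0.1 would reject it, since its axis ratio
    # is roughly 8/1200 ~= 0.007 < 0.1.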

    def _isMasked(self, footprint, mExposure):
        """Returns whether the footprint violates the mask limits"""
        bbox = footprint.getBBox()
        mask = np.bitwise_or.reduce(mExposure.mask[:, bbox].array, axis=0)
        size = float(footprint.getArea())
        for maskName, limit in self.config.maskLimits.items():
            maskVal = mExposure.mask.getPlaneBitMask(maskName)
            _mask = afwImage.MaskX(mask & maskVal, xy0=bbox.getMin())
            unmaskedSpan = footprint.spans.intersectNot(_mask)  # spanset of unmasked pixels
            if (size - unmaskedSpan.getArea())/size > limit:
                return True
        return False
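
    # Worked sketch (illustrative numbers): with maskLimits = {"SAT": 0.5}, a
    # 1000-pixel footprint whose unmasked span covers only 400 pixels has a
    # masked fraction of (1000 - 400)/1000 = 0.6 > 0.5, so it is skipped.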

    def _skipParent(self, source, masks):
        """Indicate that the parent source is not being deblended

        We set the appropriate flags and masks for each exposure.

        Parameters
        ----------
        source : `lsst.afw.table.source.source.SourceRecord`
            The source to flag as skipped
        masks : list of `lsst.afw.image.MaskX`
            The mask in each band to update with the non-detection
        """
        fp = source.getFootprint()
        source.set(self.deblendSkippedKey, True)
        if self.config.notDeblendedMask:
            for mask in masks:
                mask.addMaskPlane(self.config.notDeblendedMask)
                fp.spans.setMask(mask, mask.getPlaneBitMask(self.config.notDeblendedMask))
        # The deblender didn't run on this source, so it has zero runtime
        source.set(self.runtimeKey, 0)
        # Set the center of the parent
        bbox = fp.getBBox()
        centerX = int(bbox.getMinX()+bbox.getWidth()/2)
        centerY = int(bbox.getMinY()+bbox.getHeight()/2)
        source.set(self.peakCenter, Point2I(centerX, centerY))
        # There are no deblended children, so nChild = 0
        source.set(self.nChildKey, 0)
        # But we also want to know how many peaks that we would have
        # deblended if the parent wasn't skipped.
        source.set(self.nPeaksKey, len(fp.peaks))
        # The blend was skipped, so it didn't take any iterations
        source.set(self.iterKey, 0)
        # Top level parents are not a detected peak, so they have no peakId
        source.set(self.peakIdKey, 0)
        # Top level parents also have no parentNPeaks
        source.set(self.parentNPeaksKey, 0)

    def _addChild(self, parent, sources, heavy, scarletSource, blend_converged, xy0, flux):
        """Add a child to a catalog

        This creates a new child in the source catalog,
        assigning it a parent id, adding a footprint,
        and setting all appropriate flags based on the
        deblender result.
        """
        assert len(heavy.getPeaks()) == 1
        src = sources.addNew()
        for key in self.toCopyFromParent:
            src.set(key, parent.get(key))
        src.assign(heavy.getPeaks()[0], self.peakSchemaMapper)
        src.setParent(parent.getId())
        src.setFootprint(heavy)
        # Set the psf key based on whether or not the source was
        # deblended using the PointSource model.
        # This key is not that useful anymore since we now keep track of
        # `modelType`, but we continue to propagate it in case code downstream
        # is expecting it.
        src.set(self.psfKey, scarletSource.__class__.__name__ == "PointSource")
        src.set(self.runtimeKey, 0)
        src.set(self.blendConvergenceFailedFlagKey, not blend_converged)

        # Set the position of the peak from the parent footprint.
        # This will make it easier to match the same source across
        # deblenders and across observations, where the peak
        # position is unlikely to change unless enough time passes
        # for a source to move on the sky.
        peak = scarletSource.detectedPeak
        src.set(self.peakCenter, Point2I(peak["i_x"], peak["i_y"]))
        src.set(self.peakIdKey, peak["id"])

        # The children have a single peak
        src.set(self.nPeaksKey, 1)

        # Store the flux at the center of the model and the total
        # scarlet flux measurement.
        morph = afwDet.multiband.heavyFootprintToImage(heavy).image.array

        # Set the flux at the center of the model (for SNR)
        try:
            cy, cx = scarletSource.center
            cy = np.max([np.min([int(np.round(cy)), morph.shape[0]-1]), 0])
            cx = np.max([np.min([int(np.round(cx)), morph.shape[1]-1]), 0])
            src.set(self.modelCenterFlux, morph[cy, cx])
        except AttributeError:
            msg = "Did not recognize coordinates for source type of `{0}`, "
            msg += "could not write coordinates or center flux. "
            msg += "Add `{0}` to meas_extensions_scarlet to properly persist this information."
            logger.warning(msg.format(type(scarletSource)))

        src.set(self.modelTypeKey, scarletSource.__class__.__name__)
        # Include the source flux in the model space in the catalog.
        # This uses the narrower model PSF, which ensures that all sources
        # not located on an edge have all of their flux included in the
        # measurement.
        src.set(self.scarletFluxKey, flux)
        return src