Coverage for python/lsst/meas/extensions/scarlet/scarletDeblendTask.py : 14%

# This file is part of meas_extensions_scarlet.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

import numpy as np
import scarlet
from scarlet.psf import ImagePSF, GaussianPSF
from scarlet import Blend, Frame, Observation
from scarlet.initialization import initAllSources

import lsst.log
import lsst.pex.config as pexConfig
from lsst.pex.exceptions import InvalidParameterError
import lsst.pipe.base as pipeBase
from lsst.geom import Point2I, Box2I, Point2D
import lsst.afw.geom.ellipses as afwEll
import lsst.afw.image.utils
import lsst.afw.image as afwImage
import lsst.afw.detection as afwDet
import lsst.afw.table as afwTable

from .source import modelToHeavy

__all__ = ["deblend", "ScarletDeblendConfig", "ScarletDeblendTask"]

logger = lsst.log.Log.getLogger("meas.deblender.deblend")


class IncompleteDataError(Exception):
    """The PSF could not be computed due to incomplete data
    """
    pass


class ScarletGradientError(Exception):
    """An error occurred during optimization

    This error occurs when the optimizer encounters
    a NaN value while calculating the gradient.
    """
    def __init__(self, iterations, sources):
        self.iterations = iterations
        self.sources = sources
        msg = ("ScarletGradientError in iteration {0}. "
               "NaN values introduced in sources {1}")
        self.message = msg.format(iterations, sources)

    def __str__(self):
        return self.message


def _checkBlendConvergence(blend, f_rel):
    """Check whether or not a blend has converged
    """
    deltaLoss = np.abs(blend.loss[-2] - blend.loss[-1])
    convergence = f_rel * np.abs(blend.loss[-1])
    return deltaLoss < convergence
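

# A minimal runnable sketch (not part of the pipeline): the convergence test
# above succeeds when the last change in the loss is small relative to the
# loss itself. ``_MockBlend`` is a hypothetical stand-in for a fitted
# ``scarlet.Blend``, which exposes the same ``loss`` list after ``fit``.
def _exampleCheckConvergence():
    class _MockBlend:
        # |loss[-2] - loss[-1]| = 0.05 < 1e-4 * |loss[-1]| ~= 0.1 -> converged
        loss = [-1000.0, -1000.05]
    return _checkBlendConvergence(_MockBlend(), f_rel=1e-4)  # True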


def _getPsfFwhm(psf):
    """Calculate the FWHM of the `psf`
    """
    return psf.computeShape().getDeterminantRadius() * 2.35


def _computePsfImage(self, position=None):
    """Get a multiband PSF image

    The PSF Kernel Image is computed for each band
    and combined into a (filter, y, x) array.
    The result is not cached, so if the same PSF is expected
    to be used multiple times it is a good idea to store the
    result in another variable.

    Note: this is a temporary fix during the deblender sprint.
    In the future this function will replace the current method
    in `afw.MultibandExposure.computePsfImage` (DM-19789).

    Parameters
    ----------
    position : `Point2D` or `tuple`
        Coordinates to evaluate the PSF. If `position` is `None`
        then `Psf.getAveragePosition()` is used.

    Returns
    -------
    psfImage : `lsst.afw.image.MultibandImage`
        The multiband PSF image.
    """
    psfs = []
    # Make the coordinates into a Point2D (if necessary)
    if not isinstance(position, Point2D) and position is not None:
        position = Point2D(position[0], position[1])

    for bidx, single in enumerate(self.singles):
        try:
            if position is None:
                psf = single.getPsf().computeImage()
                psfs.append(psf)
            else:
                psf = single.getPsf().computeKernelImage(position)
                psfs.append(psf)
        except InvalidParameterError:
            # This band failed to compute the PSF due to incomplete data
            # at that location. This is unlikely to be a problem for Rubin,
            # however the edges of some HSC COSMOS fields contain incomplete
            # data in some bands, so we track this error to distinguish it
            # from unknown errors.
            msg = "Failed to compute PSF at {} in band {}"
            raise IncompleteDataError(msg.format(position, self.filters[bidx]))

    left = np.min([psf.getBBox().getMinX() for psf in psfs])
    bottom = np.min([psf.getBBox().getMinY() for psf in psfs])
    right = np.max([psf.getBBox().getMaxX() for psf in psfs])
    top = np.max([psf.getBBox().getMaxY() for psf in psfs])
    bbox = Box2I(Point2I(left, bottom), Point2I(right, top))
    psfs = [afwImage.utils.projectImage(psf, bbox) for psf in psfs]
    psfImage = afwImage.MultibandImage.fromImages(self.filters, psfs)
    return psfImage
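

# Illustrative usage sketch (hypothetical variables): evaluate the multiband
# PSF of a MultibandExposure at a pixel position, as ``deblend`` does below.
#
#     psfImage = _computePsfImage(mExposure, Point2D(1000.5, 2000.5))
#     psfArray = psfImage.array  # shape (nbands, ny, nx)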


def getFootprintMask(footprint, mExposure):
    """Mask pixels outside the footprint

    Parameters
    ----------
    footprint : `lsst.afw.detection.Footprint`
        The footprint of the parent to deblend.
    mExposure : `lsst.afw.image.MultibandExposure`
        The multiband exposure containing the image,
        mask, and variance data.

    Returns
    -------
    footprintMask : array
        Boolean array with pixels outside the footprint set to `True`.
    """
    bbox = footprint.getBBox()
    fpMask = afwImage.Mask(bbox)
    footprint.spans.setMask(fpMask, 1)
    fpMask = ~fpMask.getArray().astype(bool)
    return fpMask
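

# A minimal runnable sketch of ``getFootprintMask`` on a small synthetic
# footprint. ``getFootprintMask`` only reads the footprint (the exposure
# argument is unused), so ``None`` is passed for it here.
def _exampleFootprintMask():
    from lsst.afw.geom import SpanSet
    # Circular footprint of radius 2 centered at (5, 5)
    foot = afwDet.Footprint(SpanSet.fromShape(2, offset=Point2I(5, 5)))
    # Boolean array over the footprint bounding box: True outside the spans
    return getFootprintMask(foot, None)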


def deblend(mExposure, footprint, config):
    """Deblend a parent footprint

    Parameters
    ----------
    mExposure : `lsst.afw.image.MultibandExposure`
        The multiband exposure containing the image,
        mask, and variance data.
    footprint : `lsst.afw.detection.Footprint`
        The footprint of the parent to deblend.
    config : `ScarletDeblendConfig`
        Configuration of the deblending task.

    Returns
    -------
    blend : `scarlet.Blend`
        The fitted blend containing the deblended sources.
    skipped : list of int
        Indices of the peaks that could not be initialized.
    """
    # Extract coordinates from each MultiColorPeak
    bbox = footprint.getBBox()

    # Create the data array from the masked images
    images = mExposure.image[:, bbox].array

    # Use the inverse variance as the weights
    if config.useWeights:
        weights = 1/mExposure.variance[:, bbox].array
    else:
        weights = np.ones_like(images)
    badPixels = mExposure.mask.getPlaneBitMask(config.badMask)
    mask = mExposure.mask[:, bbox].array & badPixels
    weights[mask > 0] = 0

    # Mask out the pixels outside the footprint
    mask = getFootprintMask(footprint, mExposure)
    weights *= ~mask

    psfs = _computePsfImage(mExposure, footprint.getCentroid()).array.astype(np.float32)
    psfs = ImagePSF(psfs)
    model_psf = GaussianPSF(sigma=(config.modelPsfSigma,)*len(mExposure.filters))

    frame = Frame(images.shape, psfs=model_psf, channels=mExposure.filters)
    observation = Observation(images, psfs=psfs, weights=weights, channels=mExposure.filters)
    observation.match(frame)

    assert config.sourceModel in ["single", "double", "compact", "point", "fit"]

    # Set the appropriate number of components
    if config.sourceModel == "single":
        maxComponents = 1
    elif config.sourceModel == "double":
        maxComponents = 2
    elif config.sourceModel == "compact":
        raise NotImplementedError("CompactSource initialization has not yet been ported "
                                  "to the stack version of scarlet")
    elif config.sourceModel == "point":
        maxComponents = 0
    elif config.sourceModel == "fit":
        # It is likely in the future that there will be some heuristic
        # used to determine what type of model to use for each source,
        # but that has not yet been implemented (see DM-22551)
        raise NotImplementedError("sourceModel 'fit' has not been implemented yet")

    # Convert the centers to pixel coordinates
    xmin = bbox.getMinX()
    ymin = bbox.getMinY()
    centers = [np.array([peak.getIy()-ymin, peak.getIx()-xmin], dtype=int) for peak in footprint.peaks]

    # Only deblend sources that can be initialized
    sources, skipped = initAllSources(
        frame=frame,
        centers=centers,
        observation=observation,
        symmetric=config.symmetric,
        monotonic=config.monotonic,
        thresh=config.morphThresh,
        maxComponents=maxComponents,
        edgeDistance=config.edgeDistance,
        shifting=False,
        downgrade=config.downgrade,
        fallback=config.fallback,
    )

    # Attach the peak to all of the initialized sources
    srcIndex = 0
    for k, center in enumerate(centers):
        if k not in skipped:
            # This is just to make sure that there isn't a coding bug
            assert np.all(sources[srcIndex].center == center)
            # Store the record for the peak with the appropriate source
            sources[srcIndex].detectedPeak = footprint.peaks[k]
            srcIndex += 1

    # Create the blend and attempt to optimize it
    blend = Blend(sources, observation)
    try:
        blend.fit(max_iter=config.maxIter, e_rel=config.relativeError)
    except ArithmeticError:
        # This occurs when a gradient update produces a NaN value.
        # This is usually due to a source initialized with a
        # negative SED or no flux, often because the peak
        # is a noise fluctuation in one band and not a real source.
        iterations = len(blend.loss)
        failedSources = []
        for k, src in enumerate(sources):
            if np.any(~np.isfinite(src.get_model())):
                failedSources.append(k)
        raise ScarletGradientError(iterations, failedSources)

    return blend, skipped
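

# Illustrative usage sketch (hypothetical variables): deblend one parent
# footprint from a multiband exposure, as ``ScarletDeblendTask.deblend``
# does for each parent record in the catalog.
#
#     config = ScarletDeblendConfig()
#     blend, skipped = deblend(mExposure, parentRecord.getFootprint(), config)
#     converged = _checkBlendConvergence(blend, config.relativeError)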


class ScarletDeblendConfig(pexConfig.Config):
    """ScarletDeblendConfig

    Configuration for the multiband deblender.
    The parameters are organized by the parameter types, which are
    - Stopping Criteria: Used to determine if the fit has converged
    - Position Fitting Criteria: Used to fit the positions of the peaks
    - Constraints: Used to apply constraints to the peaks and their components
    - Other: Parameters that don't fit into the above categories
    """
    # Stopping Criteria
    maxIter = pexConfig.Field(dtype=int, default=300,
                              doc=("Maximum number of iterations to deblend a single parent"))
    relativeError = pexConfig.Field(dtype=float, default=1e-4,
                                    doc=("Change in the loss function between "
                                         "iterations to exit fitter"))

    # Blend Configuration options
    edgeDistance = pexConfig.Field(dtype=int, default=1,
                                   doc="All sources with flux within `edgeDistance` from the edge "
                                       "will be considered edge sources.")

    # Constraints
    morphThresh = pexConfig.Field(dtype=float, default=1,
                                  doc="Fraction of background RMS a pixel must have "
                                      "to be included in the initial morphology")
    monotonic = pexConfig.Field(dtype=bool, default=True, doc="Make models monotonic")
    symmetric = pexConfig.Field(dtype=bool, default=False, doc="Make models symmetric")

    # Other scarlet parameters
    useWeights = pexConfig.Field(
        dtype=bool, default=True,
        doc=("Whether or not to use inverse variance weighting. "
             "If `useWeights` is `False` then flat weights are used"))
    modelPsfSize = pexConfig.Field(
        dtype=int, default=11,
        doc="Model PSF side length in pixels")
    modelPsfSigma = pexConfig.Field(
        dtype=float, default=0.8,
        doc="Define sigma for the model frame PSF")
    saveTemplates = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to save the SEDs and templates")
    processSingles = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to process isolated sources in the deblender")
    sourceModel = pexConfig.Field(
        dtype=str, default="double",
        doc=("How to determine which model to use for sources, from\n"
             "- 'single': use a single component for all sources\n"
             "- 'double': use a bulge disk model for all sources\n"
             "- 'compact': use a single component model, initialized with a point source morphology, "
             "  for all sources\n"
             "- 'point': use a point-source model for all sources\n"
             "- 'fit': use a PSF fitting model to determine the number of components (not yet implemented)")
    )
    downgrade = pexConfig.Field(
        dtype=bool, default=False,
        doc="Whether or not to downgrade the number of components for sources in small bounding boxes"
    )

    # Mask-plane restrictions
    badMask = pexConfig.ListField(
        dtype=str, default=["BAD", "CR", "NO_DATA", "SAT", "SUSPECT"],
        doc="Mask planes that indicate bad pixels; pixels in these planes "
            "are given zero weight during deblending")
    statsMask = pexConfig.ListField(dtype=str, default=["SAT", "INTRP", "NO_DATA"],
                                    doc="Mask planes to ignore when performing statistics")
    maskLimits = pexConfig.DictField(
        keytype=str,
        itemtype=float,
        default={},
        doc=("Mask planes with the corresponding limit on the fraction of masked pixels. "
             "Sources violating this limit will not be deblended."),
    )

    # Size restrictions
    maxNumberOfPeaks = pexConfig.Field(
        dtype=int, default=0,
        doc=("Only deblend the brightest maxNumberOfPeaks peaks in the parent"
             " (<= 0: unlimited)"))
    maxFootprintArea = pexConfig.Field(
        dtype=int, default=1000000,
        doc=("Maximum area for footprints before they are ignored as large; "
             "non-positive means no threshold applied"))
    maxFootprintSize = pexConfig.Field(
        dtype=int, default=0,
        doc=("Maximum linear dimension for footprints before they are ignored "
             "as large; non-positive means no threshold applied"))
    minFootprintAxisRatio = pexConfig.Field(
        dtype=float, default=0.0,
        doc=("Minimum axis ratio for footprints before they are ignored "
             "as large; non-positive means no threshold applied"))

    # Failure modes
    fallback = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to fallback to a smaller number of components if a source does not initialize"
    )
    notDeblendedMask = pexConfig.Field(
        dtype=str, default="NOT_DEBLENDED", optional=True,
        doc="Mask name for footprints not deblended, or None")
    catchFailures = pexConfig.Field(
        dtype=bool, default=True,
        doc=("If True, catch exceptions thrown by the deblender, log them, "
             "and set a flag on the parent, instead of letting them propagate up"))


class ScarletDeblendTask(pipeBase.Task):
    """ScarletDeblendTask

    Split blended sources into individual sources.
    The input SourceCatalog is modified in-place; per-band template
    catalogs are returned by `run`.
    """
    ConfigClass = ScarletDeblendConfig
    _DefaultName = "scarletDeblend"

    def __init__(self, schema, peakSchema=None, **kwargs):
        """Create the task, adding necessary fields to the given schema.

        Parameters
        ----------
        schema : `lsst.afw.table.Schema`
            Schema object for measurement fields; will be modified in-place.
        peakSchema : `lsst.afw.table.Schema`
            Schema of Footprint Peaks that will be passed to the deblender.
            Any fields beyond the PeakTable minimal schema will be transferred
            to the main source Schema. If None, no fields will be transferred
            from the Peaks.
        **kwargs
            Passed to Task.__init__.
        """
        pipeBase.Task.__init__(self, **kwargs)

        peakMinimalSchema = afwDet.PeakTable.makeMinimalSchema()
        if peakSchema is None:
            # In this case, the peakSchemaMapper will transfer nothing, but
            # we'll still have one to simplify downstream code
            self.peakSchemaMapper = afwTable.SchemaMapper(peakMinimalSchema, schema)
        else:
            self.peakSchemaMapper = afwTable.SchemaMapper(peakSchema, schema)
            for item in peakSchema:
                if item.key not in peakMinimalSchema:
                    self.peakSchemaMapper.addMapping(item.key, item.field)
                    # Because SchemaMapper makes a copy of the output schema
                    # you give its ctor, it isn't updating this Schema in
                    # place. That's probably a design flaw, but in the
                    # meantime, we'll keep that schema in sync with the
                    # peakSchemaMapper.getOutputSchema() manually, by adding
                    # the same fields to both.
                    schema.addField(item.field)
            assert schema == self.peakSchemaMapper.getOutputSchema(), "Logic bug mapping schemas"
        self._addSchemaKeys(schema)
        self.schema = schema
        self.toCopyFromParent = [item.key for item in self.schema
                                 if item.field.getName().startswith("merge_footprint")]
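
    # Illustrative construction sketch (hypothetical setup; in a real
    # pipeline the schema comes from the upstream detection and merge tasks):
    #
    #     schema = afwTable.SourceTable.makeMinimalSchema()
    #     task = ScarletDeblendTask(schema=schema)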

    def _addSchemaKeys(self, schema):
        """Add deblender specific keys to the schema
        """
        self.runtimeKey = schema.addField('deblend_runtime', type=np.float32, doc='runtime in ms')

        self.iterKey = schema.addField('deblend_iterations', type=np.int32, doc='iterations to converge')

        self.nChildKey = schema.addField('deblend_nChild', type=np.int32,
                                         doc='Number of children this object has (defaults to 0)')
        self.psfKey = schema.addField('deblend_deblendedAsPsf', type='Flag',
                                      doc='Deblender thought this source looked like a PSF')
        self.tooManyPeaksKey = schema.addField('deblend_tooManyPeaks', type='Flag',
                                               doc='Source had too many peaks; '
                                                   'only the brightest were included')
        self.tooBigKey = schema.addField('deblend_parentTooBig', type='Flag',
                                         doc='Parent footprint covered too many pixels')
        self.maskedKey = schema.addField('deblend_masked', type='Flag',
                                         doc='Parent footprint was predominantly masked')
        self.sedNotConvergedKey = schema.addField('deblend_sedConvergenceFailed', type='Flag',
                                                  doc='scarlet sed optimization did not converge before '
                                                      'config.maxIter')
        self.morphNotConvergedKey = schema.addField('deblend_morphConvergenceFailed', type='Flag',
                                                    doc='scarlet morph optimization did not converge before '
                                                        'config.maxIter')
        self.blendConvergenceFailedFlagKey = schema.addField('deblend_blendConvergenceFailedFlag',
                                                             type='Flag',
                                                             doc='at least one source in the blend '
                                                                 'failed to converge')
        self.edgePixelsKey = schema.addField('deblend_edgePixels', type='Flag',
                                             doc='Source had flux on the edge of the parent footprint')
        self.deblendFailedKey = schema.addField('deblend_failed', type='Flag',
                                                doc="Deblending failed on source")
        self.deblendErrorKey = schema.addField('deblend_error', type="String", size=25,
                                               doc='Name of error if the blend failed')
        self.deblendSkippedKey = schema.addField('deblend_skipped', type='Flag',
                                                 doc="Deblender skipped this source")
        self.peakCenter = afwTable.Point2IKey.addFields(schema, name="deblend_peak_center",
                                                        doc="Center used to apply constraints in scarlet",
                                                        unit="pixel")
        self.peakIdKey = schema.addField("deblend_peakId", type=np.int32,
                                         doc="ID of the peak in the parent footprint. "
                                             "This is not unique, but the combination of 'parent' "
                                             "and 'peakId' should be for all child sources. "
                                             "Top level blends with no parents have 'peakId=0'")
        self.modelCenterFlux = schema.addField('deblend_peak_instFlux', type=float, units='count',
                                               doc="The instFlux at the peak position of the deblended model")
        self.modelTypeKey = schema.addField("deblend_modelType", type="String", size=20,
                                            doc="The type of model used, for example "
                                                "MultiExtendedSource, SingleExtendedSource, PointSource")
        self.edgeFluxFlagKey = schema.addField("deblend_edgeFluxFlag", type="Flag",
                                               doc="Source has flux on the edge of the image")
        self.nPeaksKey = schema.addField("deblend_nPeaks", type=np.int32,
                                         doc="Number of initial peaks in the blend. "
                                             "This includes peaks that may have been culled "
                                             "during deblending or failed to deblend")
        self.parentNPeaksKey = schema.addField("deblend_parentNPeaks", type=np.int32,
                                               doc="Same as deblend_nPeaks, but the number of peaks "
                                                   "in the parent footprint")
        self.scarletFluxKey = schema.addField("deblend_scarletFlux", type=np.float32,
                                              doc="Flux measurement from scarlet")
        self.scarletLogLKey = schema.addField("deblend_logL", type=np.float32,
                                              doc="Final logL, used to identify regressions in scarlet.")

        # self.log.trace('Added keys to schema: %s', ", ".join(str(x) for x in
        #                (self.nChildKey, self.tooManyPeaksKey, self.tooBigKey))
        #                )

    @pipeBase.timeMethod
    def run(self, mExposure, mergedSources):
        """Get the psf from each exposure and then run deblend().

        Parameters
        ----------
        mExposure : `MultibandExposure`
            The exposures should be co-added images of the same
            shape and region of the sky.
        mergedSources : `SourceCatalog`
            The merged `SourceCatalog` that contains parent footprints
            to (potentially) deblend.

        Returns
        -------
        templateCatalogs : dict
            Keys are the names of the filters and the values are
            `lsst.afw.table.SourceCatalog`'s.
            These are catalogs with heavy footprints that are the templates
            created by the multiband deblender.
        """
        return self.deblend(mExposure, mergedSources)

    @pipeBase.timeMethod
    def deblend(self, mExposure, sources):
        """Deblend a data cube of multiband images

        Parameters
        ----------
        mExposure : `MultibandExposure`
            The exposures should be co-added images of the same
            shape and region of the sky.
        sources : `SourceCatalog`
            The merged `SourceCatalog` that contains parent footprints
            to (potentially) deblend.

        Returns
        -------
        templateCatalogs : dict or None
            Keys are the names of the filters and the values are
            `lsst.afw.table.SourceCatalog`'s.
            These are catalogs with heavy footprints that are the templates
            created by the multiband deblender.
        """
        import time

        filters = mExposure.filters
        self.log.info("Deblending {0} sources in {1} exposure bands".format(len(sources), len(mExposure)))

        # Create the output catalogs
        templateCatalogs = {}
        # This must be returned but is not calculated right now, setting it to
        # None to be consistent with doc string
        for f in filters:
            _catalog = afwTable.SourceCatalog(sources.table.clone())
            _catalog.extend(sources)
            templateCatalogs[f] = _catalog

        n0 = len(sources)
        nparents = 0
        for pk, src in enumerate(sources):
            foot = src.getFootprint()
            bbox = foot.getBBox()
            peaks = foot.getPeaks()

            # Since we use the first peak for the parent object, we should
            # propagate its flags to the parent source.
            src.assign(peaks[0], self.peakSchemaMapper)

            # Block of skipping conditions
            if len(peaks) < 2 and not self.config.processSingles:
                for f in filters:
                    templateCatalogs[f][pk].set(self.runtimeKey, 0)
                continue
            if self._isLargeFootprint(foot):
                src.set(self.tooBigKey, True)
                self._skipParent(src, mExposure.mask)
                self.log.trace('Parent %i: skipping large footprint', int(src.getId()))
                continue
            if self._isMasked(foot, mExposure):
                src.set(self.maskedKey, True)
                mask = np.bitwise_or.reduce(mExposure.mask[:, bbox].array, axis=0)
                mask = afwImage.MaskX(mask, xy0=bbox.getMin())
                self._skipParent(src, mask)
                self.log.trace('Parent %i: skipping masked footprint', int(src.getId()))
                continue
            if self.config.maxNumberOfPeaks > 0 and len(peaks) > self.config.maxNumberOfPeaks:
                src.set(self.tooManyPeaksKey, True)
                self._skipParent(src, mExposure.mask)
                msg = 'Parent {0}: Too many peaks, skipping blend'
                self.log.trace(msg.format(int(src.getId())))
                # Unlike meas_deblender, in scarlet we skip the entire blend
                # if the number of peaks exceeds max peaks, since neglecting
                # to model any peaks often results in catastrophic failure
                # of scarlet to generate models for the brighter sources.
                continue

            nparents += 1
            self.log.trace('Parent %i: deblending %i peaks', int(src.getId()), len(peaks))
            # Run the deblender
            blendError = None
            try:
                t0 = time.time()
                # Build the parameter lists with the same ordering
                blend, skipped = deblend(mExposure, foot, self.config)
                tf = time.time()
                runtime = (tf-t0)*1000
                src.set(self.deblendFailedKey, False)
                src.set(self.runtimeKey, runtime)
                converged = _checkBlendConvergence(blend, self.config.relativeError)
                src.set(self.blendConvergenceFailedFlagKey, not converged)
                sources = [src for src in blend.sources]
                # Re-insert place holders for skipped sources
                # to propagate them in the catalog so
                # that the peaks stay consistent
                for k in skipped:
                    sources.insert(k, None)
            # Catch all errors and filter out the ones that we know about
            except Exception as e:
                blendError = type(e).__name__
                if isinstance(e, ScarletGradientError):
                    src.set(self.iterKey, e.iterations)
                elif not isinstance(e, IncompleteDataError):
                    blendError = "UnknownError"
                    self._skipParent(src, mExposure.mask)
                    if self.config.catchFailures:
                        # Make it easy to find UnknownErrors in the log file
                        self.log.warn("UnknownError")
                        import traceback
                        traceback.print_exc()
                    else:
                        raise
                self.log.warn("Unable to deblend source %d: %s" % (src.getId(), blendError))
                src.set(self.deblendFailedKey, True)
                src.set(self.deblendErrorKey, blendError)
                self._skipParent(src, mExposure.mask)
                continue

            # Add the merged source as a parent in the catalog for each band
            templateParents = {}
            parentId = src.getId()
            for f in filters:
                templateParents[f] = templateCatalogs[f][pk]
                templateParents[f].set(self.nPeaksKey, len(foot.peaks))
                templateParents[f].set(self.runtimeKey, runtime)
                templateParents[f].set(self.iterKey, len(blend.loss))
                logL = blend.loss[-1]-blend.observations[0].log_norm
                templateParents[f].set(self.scarletLogLKey, logL)

            # Add each source to the catalogs in each band
            nchild = 0
            for k, source in enumerate(sources):
                # Skip any sources with no flux or that scarlet skipped because
                # it could not initialize
                if k in skipped:
                    # No need to propagate anything
                    continue
                else:
                    src.set(self.deblendSkippedKey, False)
                models = modelToHeavy(source, filters, xy0=bbox.getMin(),
                                      observation=blend.observations[0])
                flux = scarlet.measure.flux(source)
                for fidx, f in enumerate(filters):
                    if len(models[f].getPeaks()) != 1:
                        err = "Heavy footprint should have a single peak, got {0}"
                        raise ValueError(err.format(len(models[f].peaks)))
                    cat = templateCatalogs[f]
                    child = self._addChild(src, cat, models[f], source, converged,
                                           xy0=bbox.getMin(), flux=flux[fidx])
                    if parentId == 0:
                        child.setId(src.getId())
                        child.set(self.runtimeKey, runtime)
                nchild += 1

            # Set the number of children for each parent
            for f in filters:
                templateParents[f].set(self.nChildKey, nchild)

        K = len(list(templateCatalogs.values())[0])
        self.log.info('Deblended: of %i sources, %i were deblended, creating %i children, total %i sources'
                      % (n0, nparents, K-n0, K))
        return templateCatalogs
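
    # Illustrative usage sketch (hypothetical variables): run the task on a
    # multiband exposure and a merged detection catalog, then pull out the
    # per-band template catalog for one filter.
    #
    #     templateCatalogs = task.run(mExposure, mergedSources)
    #     gTemplates = templateCatalogs["g"]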

    def _isLargeFootprint(self, footprint):
        """Returns whether a Footprint is large

        'Large' is defined by thresholds on the area, size and axis ratio.
        These may be disabled independently by configuring them to be
        non-positive.

        This is principally intended to get rid of satellite streaks, which the
        deblender or other downstream processing can have trouble dealing with
        (e.g., multiple large HeavyFootprints can chew up memory).
        """
        if self.config.maxFootprintArea > 0 and footprint.getArea() > self.config.maxFootprintArea:
            return True
        if self.config.maxFootprintSize > 0:
            bbox = footprint.getBBox()
            if max(bbox.getWidth(), bbox.getHeight()) > self.config.maxFootprintSize:
                return True
        if self.config.minFootprintAxisRatio > 0:
            axes = afwEll.Axes(footprint.getShape())
            if axes.getB() < self.config.minFootprintAxisRatio*axes.getA():
                return True
        return False

    def _isMasked(self, footprint, mExposure):
        """Returns whether the footprint violates the mask limits"""
        bbox = footprint.getBBox()
        mask = np.bitwise_or.reduce(mExposure.mask[:, bbox].array, axis=0)
        size = float(footprint.getArea())
        for maskName, limit in self.config.maskLimits.items():
            maskVal = mExposure.mask.getPlaneBitMask(maskName)
            _mask = afwImage.MaskX(mask & maskVal, xy0=bbox.getMin())
            # spanset of unmasked pixels
            unmaskedSpan = footprint.spans.intersectNot(_mask)
            if (size - unmaskedSpan.getArea())/size > limit:
                return True
        return False
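
    # Worked example (hypothetical numbers): with
    # ``maskLimits = {"NO_DATA": 0.25}``, a 1000-pixel footprint whose
    # unmasked span covers 700 pixels has a masked fraction of
    # (1000 - 700)/1000 = 0.3 > 0.25, so the parent is skipped.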

    def _skipParent(self, source, masks):
        """Indicate that the parent source is not being deblended

        We set the appropriate flags and masks for each exposure.

        Parameters
        ----------
        source : `lsst.afw.table.SourceRecord`
            The source to flag as skipped
        masks : list of `lsst.afw.image.MaskX`
            The mask in each band to update with the non-detection
        """
        fp = source.getFootprint()
        source.set(self.deblendSkippedKey, True)
        if self.config.notDeblendedMask:
            for mask in masks:
                mask.addMaskPlane(self.config.notDeblendedMask)
                fp.spans.setMask(mask, mask.getPlaneBitMask(self.config.notDeblendedMask))
        # The deblender didn't run on this source, so it has zero runtime
        source.set(self.runtimeKey, 0)
        # Set the center of the parent
        bbox = fp.getBBox()
        centerX = int(bbox.getMinX()+bbox.getWidth()/2)
        centerY = int(bbox.getMinY()+bbox.getHeight()/2)
        source.set(self.peakCenter, Point2I(centerX, centerY))
        # There are no deblended children, so nChild = 0
        source.set(self.nChildKey, 0)
        # But we also want to know how many peaks that we would have
        # deblended if the parent wasn't skipped.
        source.set(self.nPeaksKey, len(fp.peaks))
        # The blend was skipped, so it didn't take any iterations
        source.set(self.iterKey, 0)
        # Top level parents are not a detected peak, so they have no peakId
        source.set(self.peakIdKey, 0)
        # Top level parents also have no parentNPeaks
        source.set(self.parentNPeaksKey, 0)

    def _addChild(self, parent, sources, heavy, scarletSource, blend_converged, xy0, flux):
        """Add a child to a catalog

        This creates a new child in the source catalog,
        assigning it a parent id, adding a footprint,
        and setting all appropriate flags based on the
        deblender result.
        """
        assert len(heavy.getPeaks()) == 1
        src = sources.addNew()
        for key in self.toCopyFromParent:
            src.set(key, parent.get(key))
        src.assign(heavy.getPeaks()[0], self.peakSchemaMapper)
        src.setParent(parent.getId())
        src.setFootprint(heavy)
        # Set the psf key based on whether or not the source was
        # deblended using the PointSource model.
        # This key is not that useful anymore since we now keep track of
        # `modelType`, but we continue to propagate it in case code downstream
        # is expecting it.
        src.set(self.psfKey, scarletSource.__class__.__name__ == "PointSource")
        src.set(self.runtimeKey, 0)
        src.set(self.blendConvergenceFailedFlagKey, not blend_converged)

        # Set the position of the peak from the parent footprint.
        # This will make it easier to match the same source across
        # deblenders and across observations, where the peak
        # position is unlikely to change unless enough time passes
        # for a source to move on the sky.
        peak = scarletSource.detectedPeak
        src.set(self.peakCenter, Point2I(peak["i_x"], peak["i_y"]))
        src.set(self.peakIdKey, peak["id"])

        # The children have a single peak
        src.set(self.nPeaksKey, 1)

        # Store the flux at the center of the model and the total
        # scarlet flux measurement.
        morph = afwDet.multiband.heavyFootprintToImage(heavy).image.array

        # Set the flux at the center of the model (for SNR)
        try:
            cy, cx = scarletSource.center
            cy = np.max([np.min([int(np.round(cy)), morph.shape[0]-1]), 0])
            cx = np.max([np.min([int(np.round(cx)), morph.shape[1]-1]), 0])
            src.set(self.modelCenterFlux, morph[cy, cx])
        except AttributeError:
            msg = "Did not recognize coordinates for source type of `{0}`, "
            msg += "could not write coordinates or center flux. "
            msg += "Add `{0}` to meas_extensions_scarlet to properly persist this information."
            logger.warning(msg.format(type(scarletSource)))

        src.set(self.modelTypeKey, scarletSource.__class__.__name__)
        src.set(self.edgeFluxFlagKey, scarletSource.isEdge)
        # Include the source flux in the model space in the catalog.
        # This uses the narrower model PSF, which ensures that all sources
        # not located on an edge have all of their flux included in the
        # measurement.
        src.set(self.scarletFluxKey, flux)
        return src