# This file is part of meas_extensions_scarlet.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

from functools import partial
import logging
import numpy as np
import scarlet
from scarlet.psf import ImagePSF, GaussianPSF
from scarlet import Blend, Frame, Observation
from scarlet.renderer import ConvolutionRenderer
from scarlet.detect import get_detect_wavelets
from scarlet.initialization import init_all_sources
from scarlet import lite

import lsst.pex.config as pexConfig
from lsst.pex.exceptions import InvalidParameterError
import lsst.pipe.base as pipeBase
from lsst.geom import Point2I, Box2I, Point2D
import lsst.afw.geom.ellipses as afwEll
import lsst.afw.image as afwImage
import lsst.afw.detection as afwDet
import lsst.afw.table as afwTable
from lsst.utils.logging import PeriodicLogger
from lsst.utils.timer import timeMethod

from .source import bboxToScarletBox, modelToHeavy, liteModelToHeavy

# Scarlet and proxmin define log levels differently than the stack,
# so even "warnings" occur far more often than we would like, and all
# other scarlet output would be considered "TRACE" by our standards.
# For now we only display scarlet and proxmin errors.
scarletLogger = logging.getLogger("scarlet")
scarletLogger.setLevel(logging.ERROR)
proxminLogger = logging.getLogger("proxmin")
proxminLogger.setLevel(logging.ERROR)

__all__ = ["deblend", "ScarletDeblendConfig", "ScarletDeblendTask"]

logger = logging.getLogger(__name__)

class IncompleteDataError(Exception):
    """The PSF could not be computed due to incomplete data
    """
    pass

class ScarletGradientError(Exception):
    """An error occurred during optimization

    This error occurs when the optimizer encounters
    a NaN value while calculating the gradient.
    """
    def __init__(self, iterations, sources):
        self.iterations = iterations
        self.sources = sources
        msg = ("ScarletGradientError in iteration {0}. "
               "NaN values introduced in sources {1}")
        self.message = msg.format(iterations, sources)

    def __str__(self):
        return self.message

def _checkBlendConvergence(blend, f_rel):
    """Check whether or not a blend has converged
    """
    deltaLoss = np.abs(blend.loss[-2] - blend.loss[-1])
    convergence = f_rel * np.abs(blend.loss[-1])
    return deltaLoss < convergence

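# A minimal worked sketch of the convergence test above, using hypothetical
# loss values; the criterion is |loss[-2] - loss[-1]| < f_rel * |loss[-1]|:
#
#     loss = [-1000.0, -1010.0, -1010.5]
#     deltaLoss = abs(loss[-2] - loss[-1])  # 0.5
#     threshold = 1e-2 * abs(loss[-1])      # ~10.1, so this blend converged
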

def _getPsfFwhm(psf):
    """Calculate the FWHM of the `psf`
    """
    # 2.35 approximates 2*sqrt(2*ln(2)) ~= 2.355, the conversion from the
    # standard deviation of a Gaussian to its full width at half maximum.
    return psf.computeShape().getDeterminantRadius() * 2.35

def _computePsfImage(self, position=None):
    """Get a multiband PSF image

    The PSF Kernel Image is computed for each band
    and combined into a (filter, y, x) array and stored
    as `self._psfImage`.
    The result is not cached, so if the same PSF is expected
    to be used multiple times it is a good idea to store the
    result in another variable.

    Note: this is a temporary fix during the deblender sprint.
    In the future this function will replace the current method
    in `afw.MultibandExposure.computePsfImage` (DM-19789).

    Parameters
    ----------
    position : `Point2D` or `tuple`
        Coordinates to evaluate the PSF. If `position` is `None`
        then `Psf.getAveragePosition()` is used.

    Returns
    -------
    self._psfImage : array
        The multiband PSF image.
    """
    psfs = []
    # Make the coordinates into a Point2D (if necessary)
    if not isinstance(position, Point2D) and position is not None:
        position = Point2D(position[0], position[1])

    for bidx, single in enumerate(self.singles):
        try:
            if position is None:
                psf = single.getPsf().computeImage()
                psfs.append(psf)
            else:
                psf = single.getPsf().computeKernelImage(position)
                psfs.append(psf)
        except InvalidParameterError:
            # This band failed to compute the PSF due to incomplete data
            # at that location. This is unlikely to be a problem for Rubin,
            # however the edges of some HSC COSMOS fields contain incomplete
            # data in some bands, so we track this error to distinguish it
            # from unknown errors.
            msg = "Failed to compute PSF at {} in band {}"
            raise IncompleteDataError(msg.format(position, self.filters[bidx]))

    left = np.min([psf.getBBox().getMinX() for psf in psfs])
    bottom = np.min([psf.getBBox().getMinY() for psf in psfs])
    right = np.max([psf.getBBox().getMaxX() for psf in psfs])
    top = np.max([psf.getBBox().getMaxY() for psf in psfs])
    bbox = Box2I(Point2I(left, bottom), Point2I(right, top))
    psfs = [afwImage.utils.projectImage(psf, bbox) for psf in psfs]
    psfImage = afwImage.MultibandImage.fromImages(self.filters, psfs)
    return psfImage

def getFootprintMask(footprint, mExposure):
    """Mask pixels outside the footprint

    Parameters
    ----------
    footprint : `lsst.detection.Footprint`
        - The footprint of the parent to deblend
    mExposure : `lsst.image.MultibandExposure`
        - The multiband exposure containing the image,
          mask, and variance data

    Returns
    -------
    footprintMask : array
        Boolean array with pixels not in the footprint set to one.
    """
    bbox = footprint.getBBox()
    fpMask = afwImage.Mask(bbox)
    footprint.spans.setMask(fpMask, 1)
    fpMask = ~fpMask.getArray().astype(bool)
    return fpMask

def isPseudoSource(source, pseudoColumns):
    """Check if a source is a pseudo source.

    This is mostly for skipping sky objects,
    but any other column can also be added to disable
    deblending on a parent or individual source when
    set to `True`.

    Parameters
    ----------
    source : `lsst.afw.table.source.source.SourceRecord`
        The source to check for the pseudo bit.
    pseudoColumns : `list` of `str`
        A list of columns to check for pseudo sources.
    """
    isPseudo = False
    for col in pseudoColumns:
        try:
            isPseudo |= source[col]
        except KeyError:
            pass
    return isPseudo

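# A minimal usage sketch for `isPseudoSource`, mirroring how the deblend
# functions below filter peaks against the default pseudo columns
# ('merge_peak_sky' and 'sky_source'):
#
#     for peak in footprint.peaks:
#         if isPseudoSource(peak, config.pseudoColumns):
#             continue  # skip sky objects and other pseudo sources
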

def deblend(mExposure, footprint, config):
    """Deblend a parent footprint

    Parameters
    ----------
    mExposure : `lsst.image.MultibandExposure`
        - The multiband exposure containing the image,
          mask, and variance data
    footprint : `lsst.detection.Footprint`
        - The footprint of the parent to deblend
    config : `ScarletDeblendConfig`
        - Configuration of the deblending task

    Returns
    -------
    blend : `scarlet.Blend`
        The scarlet blend class that contains all of the information
        about the parameters and results from scarlet
    skipped : `list` of `int`
        The indices of any children that failed to initialize
        and were skipped.
    spectrumInit : `bool`
        Whether or not all of the sources were initialized by jointly
        fitting their SEDs. This provides a better initialization
        but can create memory issues when a blend is too large or
        contains too many sources.
    """
    # Extract coordinates from each MultiColorPeak
    bbox = footprint.getBBox()

    # Create the data array from the masked images
    images = mExposure.image[:, bbox].array

    # Use the inverse variance as the weights
    if config.useWeights:
        weights = 1/mExposure.variance[:, bbox].array
    else:
        weights = np.ones_like(images)
    badPixels = mExposure.mask.getPlaneBitMask(config.badMask)
    mask = mExposure.mask[:, bbox].array & badPixels
    weights[mask > 0] = 0

    # Mask out the pixels outside the footprint
    mask = getFootprintMask(footprint, mExposure)
    weights *= ~mask

    psfs = _computePsfImage(mExposure, footprint.getCentroid()).array.astype(np.float32)
    psfs = ImagePSF(psfs)
    model_psf = GaussianPSF(sigma=(config.modelPsfSigma,)*len(mExposure.filters))

    frame = Frame(images.shape, psf=model_psf, channels=mExposure.filters)
    observation = Observation(images, psf=psfs, weights=weights, channels=mExposure.filters)
    if config.convolutionType == "fft":
        observation.match(frame)
    elif config.convolutionType == "real":
        renderer = ConvolutionRenderer(observation, frame, convolution_type="real")
        observation.match(frame, renderer=renderer)
    else:
        raise ValueError("Unrecognized convolution type {}".format(config.convolutionType))

    assert config.sourceModel in ["single", "double", "compact", "point", "fit"]

    # Set the appropriate number of components
    if config.sourceModel == "single":
        maxComponents = 1
    elif config.sourceModel == "double":
        maxComponents = 2
    elif config.sourceModel == "compact":
        maxComponents = 0
    elif config.sourceModel == "point":
        raise NotImplementedError("Point source photometry is currently not implemented")
    elif config.sourceModel == "fit":
        # It is likely in the future that there will be some heuristic
        # used to determine what type of model to use for each source,
        # but that has not yet been implemented (see DM-22551)
        raise NotImplementedError("sourceModel 'fit' has not been implemented yet")

    # Convert the centers to pixel coordinates
    xmin = bbox.getMinX()
    ymin = bbox.getMinY()
    centers = [
        np.array([peak.getIy() - ymin, peak.getIx() - xmin], dtype=int)
        for peak in footprint.peaks
        if not isPseudoSource(peak, config.pseudoColumns)
    ]

    # Choose whether or not to use the improved spectral initialization
    if config.setSpectra:
        if config.maxSpectrumCutoff <= 0:
            spectrumInit = True
        else:
            spectrumInit = len(centers) * bbox.getArea() < config.maxSpectrumCutoff
    else:
        spectrumInit = False

    # Only deblend sources that can be initialized
    sources, skipped = init_all_sources(
        frame=frame,
        centers=centers,
        observations=observation,
        thresh=config.morphThresh,
        max_components=maxComponents,
        min_snr=config.minSNR,
        shifting=False,
        fallback=config.fallback,
        silent=config.catchFailures,
        set_spectra=spectrumInit,
    )

    # Attach the peak to all of the initialized sources
    srcIndex = 0
    for k, center in enumerate(centers):
        if k not in skipped:
            # This is just to make sure that there isn't a coding bug
            assert np.all(sources[srcIndex].center == center)
            # Store the record for the peak with the appropriate source
            sources[srcIndex].detectedPeak = footprint.peaks[k]
            srcIndex += 1

    # Create the blend and attempt to optimize it
    blend = Blend(sources, observation)
    try:
        blend.fit(max_iter=config.maxIter, e_rel=config.relativeError)
    except ArithmeticError:
        # This occurs when a gradient update produces a NaN value.
        # This is usually due to a source initialized with a
        # negative SED or no flux, often because the peak
        # is a noise fluctuation in one band and not a real source.
        iterations = len(blend.loss)
        failedSources = []
        for k, src in enumerate(sources):
            if np.any(~np.isfinite(src.get_model())):
                failedSources.append(k)
        raise ScarletGradientError(iterations, failedSources)

    return blend, skipped, spectrumInit

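# A minimal usage sketch for `deblend`, mirroring how `ScarletDeblendTask`
# calls it below; `mExposure`, `footprint`, and `config` are assumed to be
# a `MultibandExposure`, a parent `Footprint`, and a `ScarletDeblendConfig`:
#
#     try:
#         blend, skipped, spectrumInit = deblend(mExposure, footprint, config)
#     except ScarletGradientError as e:
#         print(f"NaN gradient after {e.iterations} iterations")
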

def deblend_lite(mExposure, footprint, config, wavelets=None):
    """Deblend a parent footprint

    Parameters
    ----------
    mExposure : `lsst.image.MultibandExposure`
        - The multiband exposure containing the image,
          mask, and variance data
    footprint : `lsst.detection.Footprint`
        - The footprint of the parent to deblend
    config : `ScarletDeblendConfig`
        - Configuration of the deblending task
    wavelets : array, optional
        Pre-computed wavelet coefficients for the full exposure,
        used when `config.morphImage` is "wavelet".

    Returns
    -------
    blend : `scarlet.lite.LiteBlend`
        The scarlet lite blend that contains all of the information
        about the parameters and results from scarlet.
    skipped : `list`
        The sources that could not be initialized and were skipped.
    spectrumInit : `bool`
        Whether or not the sources were initialized with their
        best-fit spectra.
    """
    # Extract coordinates from each MultiColorPeak
    bbox = footprint.getBBox()

    # Create the data array from the masked images
    images = mExposure.image[:, bbox].array
    variance = mExposure.variance[:, bbox].array

    # Use the inverse variance as the weights
    if config.useWeights:
        weights = 1/mExposure.variance[:, bbox].array
    else:
        weights = np.ones_like(images)
    badPixels = mExposure.mask.getPlaneBitMask(config.badMask)
    mask = mExposure.mask[:, bbox].array & badPixels
    weights[mask > 0] = 0

    # Mask out the pixels outside the footprint
    mask = getFootprintMask(footprint, mExposure)
    weights *= ~mask

    psfs = _computePsfImage(mExposure, footprint.getCentroid()).array.astype(np.float32)
    modelPsf = lite.integrated_circular_gaussian(sigma=config.modelPsfSigma)

    observation = lite.LiteObservation(
        images=images,
        variance=variance,
        weights=weights,
        psfs=psfs,
        model_psf=modelPsf[None, :, :],
        convolution_mode=config.convolutionType,
    )

    # Convert the centers to pixel coordinates
    xmin = bbox.getMinX()
    ymin = bbox.getMinY()
    centers = [
        np.array([peak.getIy() - ymin, peak.getIx() - xmin], dtype=int)
        for peak in footprint.peaks
        if not isPseudoSource(peak, config.pseudoColumns)
    ]

    # Initialize the sources
    if config.morphImage == "chi2":
        sources = lite.init_all_sources_main(
            observation,
            centers,
            min_snr=config.minSNR,
            thresh=config.morphThresh,
        )
    elif config.morphImage == "wavelet":
        _bbox = bboxToScarletBox(len(mExposure.filters), bbox, bbox.getMin())
        _wavelets = wavelets[(slice(None), *_bbox[1:].slices)]
        sources = lite.init_all_sources_wavelets(
            observation,
            centers,
            use_psf=False,
            wavelets=_wavelets,
            min_snr=config.minSNR,
        )
    else:
        raise ValueError("morphImage must be either 'chi2' or 'wavelet'.")

    # Set the optimizer
    if config.optimizer == "adaprox":
        parameterization = partial(
            lite.init_adaprox_component,
            bg_thresh=config.backgroundThresh,
            max_prox_iter=config.maxProxIter,
        )
    elif config.optimizer == "fista":
        parameterization = partial(
            lite.init_fista_component,
            bg_thresh=config.backgroundThresh,
        )
    else:
        raise ValueError("Unrecognized optimizer. Must be either 'adaprox' or 'fista'.")
    sources = lite.parameterize_sources(sources, observation, parameterization)

    # Attach the peak to all of the initialized sources
    for k, center in enumerate(centers):
        # This is just to make sure that there isn't a coding bug
        if len(sources[k].components) > 0 and np.any(sources[k].center != center):
            raise ValueError(f"Misaligned center, expected {center} but got {sources[k].center}")
        # Store the record for the peak with the appropriate source
        sources[k].detectedPeak = footprint.peaks[k]

    blend = lite.LiteBlend(sources, observation)

    # Initialize each source with its best fit spectrum.
    # This significantly cuts down on the number of iterations
    # that the optimizer needs and usually results in a better
    # fit, but using least squares on a very large blend causes memory issues.
    # This is typically the most expensive operation in deblending, memorywise.
    spectrumInit = False
    if config.setSpectra:
        if config.maxSpectrumCutoff <= 0 or len(centers) * bbox.getArea() < config.maxSpectrumCutoff:
            spectrumInit = True
            blend.fit_spectra()

    # Set the sources that could not be initialized and were skipped
    skipped = [src for src in sources if src.is_null]

    blend.fit(max_iter=config.maxIter, e_rel=config.relativeError, min_iter=config.minIter)

    return blend, skipped, spectrumInit

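# A minimal usage sketch for `deblend_lite` with wavelet initialization,
# mirroring `ScarletDeblendTask.deblend` below; the wavelet coefficients
# are computed once for the full exposure and sliced to each parent
# footprint inside `deblend_lite`:
#
#     wavelets = get_detect_wavelets(mExposure.image.array,
#                                    mExposure.variance.array,
#                                    scales=config.waveletScales)
#     blend, skipped, spectrumInit = deblend_lite(mExposure, footprint,
#                                                 config, wavelets)
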

class ScarletDeblendConfig(pexConfig.Config):
    """ScarletDeblendConfig

    Configuration for the multiband deblender.
    The parameters are organized by the parameter types, which are
    - Stopping Criteria: Used to determine if the fit has converged
    - Position Fitting Criteria: Used to fit the positions of the peaks
    - Constraints: Used to apply constraints to the peaks and their components
    - Other: Parameters that don't fit into the above categories
    """
    # Stopping Criteria
    minIter = pexConfig.Field(dtype=int, default=1,
                              doc="Minimum number of iterations before the optimizer is allowed to stop.")
    maxIter = pexConfig.Field(dtype=int, default=300,
                              doc=("Maximum number of iterations to deblend a single parent"))
    relativeError = pexConfig.Field(dtype=float, default=1e-2,
                                    doc=("Change in the loss function between iterations to exit fitter. "
                                         "Typically this is `1e-2` if measurements will be made on the "
                                         "flux re-distributed models and `1e-4` when making measurements "
                                         "on the models themselves."))

    # Constraints
    morphThresh = pexConfig.Field(dtype=float, default=1,
                                  doc="Fraction of background RMS a pixel must have "
                                      "to be included in the initial morphology")
    # Lite Parameters
    # All of these parameters (except version) are only valid if version='lite'
    version = pexConfig.ChoiceField(
        dtype=str,
        default="lite",
        allowed={
            "scarlet": "main scarlet version (likely to be deprecated soon)",
            "lite": "Optimized version of scarlet for survey data from a single instrument",
        },
        doc="The version of scarlet to use.",
    )
    optimizer = pexConfig.ChoiceField(
        dtype=str,
        default="adaprox",
        allowed={
            "adaprox": "Proximal ADAM optimization",
            "fista": "Accelerated proximal gradient method",
        },
        doc="The optimizer to use for fitting parameters; only used when version='lite'.",
    )
    morphImage = pexConfig.ChoiceField(
        dtype=str,
        default="chi2",
        allowed={
            "chi2": "Initialize sources on a chi^2 image made from all available bands",
            "wavelet": "Initialize sources using a wavelet decomposition of the chi^2 image",
        },
        doc="The type of image to use for initializing the morphology. "
            "Must be either 'chi2' or 'wavelet'."
    )
    backgroundThresh = pexConfig.Field(
        dtype=float,
        default=0.25,
        doc="Fraction of background to use for a sparsity threshold. "
            "This prevents sources from growing unrealistically outside "
            "the parent footprint while still modeling flux correctly "
            "for bright sources."
    )
    maxProxIter = pexConfig.Field(
        dtype=int,
        default=1,
        doc="Maximum number of proximal operator iterations inside of each "
            "iteration of the optimizer. "
            "This config field is only used if version='lite' and optimizer='adaprox'."
    )
    waveletScales = pexConfig.Field(
        dtype=int,
        default=5,
        doc="Number of wavelet scales to use for wavelet initialization. "
            "This field is only used when `version`='lite' and `morphImage`='wavelet'."
    )

    # Other scarlet parameters
    useWeights = pexConfig.Field(
        dtype=bool, default=True,
        doc=("Whether or not to use inverse variance weighting. "
             "If `useWeights` is `False` then flat weights are used"))
    modelPsfSize = pexConfig.Field(
        dtype=int, default=11,
        doc="Model PSF side length in pixels")
    modelPsfSigma = pexConfig.Field(
        dtype=float, default=0.8,
        doc="Define sigma for the model frame PSF")
    minSNR = pexConfig.Field(
        dtype=float, default=50,
        doc="Minimum signal to noise to accept the source. "
            "Sources with lower flux will be initialized with the PSF but updated "
            "like an ordinary ExtendedSource (known in scarlet as a `CompactSource`).")
    saveTemplates = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to save the SEDs and templates")
    processSingles = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to process isolated sources in the deblender")
    convolutionType = pexConfig.Field(
        dtype=str, default="fft",
        doc="Type of convolution to render the model to the observations.\n"
            "- 'fft': perform convolutions in Fourier space\n"
            "- 'real': perform convolutions in real space.")
    sourceModel = pexConfig.Field(
        dtype=str, default="double",
        doc=("How to determine which model to use for sources, from\n"
             "- 'single': use a single component for all sources\n"
             "- 'double': use a bulge disk model for all sources\n"
             "- 'compact': use a single component model, initialized with a point source morphology, "
             "  for all sources\n"
             "- 'point': use a point-source model for all sources\n"
             "- 'fit': use a PSF fitting model to determine the number of components (not yet "
             "implemented)"),
        deprecated="This field will be deprecated when the default for `version` is changed to `lite`.",
    )
    setSpectra = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to solve for the best-fit spectra during initialization. "
            "This makes initialization slightly longer, as it requires a convolution "
            "to set the optimal spectra, but results in a much better initial log-likelihood "
            "and reduced total runtime, with convergence in fewer iterations. "
            "This option is only used when peaks*area < `maxSpectrumCutoff`.")

    # Mask-plane restrictions
    badMask = pexConfig.ListField(
        dtype=str, default=["BAD", "CR", "NO_DATA", "SAT", "SUSPECT", "EDGE"],
        doc="Mask planes that indicate bad pixels, which are given zero weight during deblending")
    statsMask = pexConfig.ListField(dtype=str, default=["SAT", "INTRP", "NO_DATA"],
                                    doc="Mask planes to ignore when performing statistics")
    maskLimits = pexConfig.DictField(
        keytype=str,
        itemtype=float,
        default={},
        doc=("Mask planes with the corresponding limit on the fraction of masked pixels. "
             "Sources violating this limit will not be deblended."),
    )

    # Size restrictions
    maxNumberOfPeaks = pexConfig.Field(
        dtype=int, default=0,
        doc=("Only deblend the brightest maxNumberOfPeaks peaks in the parent"
             " (<= 0: unlimited)"))
    maxFootprintArea = pexConfig.Field(
        dtype=int, default=1000000,
        doc=("Maximum area for footprints before they are ignored as large; "
             "non-positive means no threshold applied"))
    maxFootprintSize = pexConfig.Field(
        dtype=int, default=0,
        doc=("Maximum linear dimension for footprints before they are ignored "
             "as large; non-positive means no threshold applied"))
    minFootprintAxisRatio = pexConfig.Field(
        dtype=float, default=0.0,
        doc=("Minimum axis ratio for footprints before they are ignored "
             "as large; non-positive means no threshold applied"))
    maxSpectrumCutoff = pexConfig.Field(
        dtype=int, default=1000000,
        doc=("Maximum number of pixels * number of sources in a blend. "
             "This is different than `maxFootprintArea` because this isn't "
             "the footprint area but the area of the bounding box that "
             "contains the footprint, and is also multiplied by the number of "
             "sources in the footprint. This prevents large skinny blends with "
             "a high density of sources from running out of memory. "
             "If `maxSpectrumCutoff` is non-positive then there is no cutoff.")
    )

    # Failure modes
    fallback = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to fallback to a smaller number of components if a source does not initialize"
    )
    notDeblendedMask = pexConfig.Field(
        dtype=str, default="NOT_DEBLENDED", optional=True,
        doc="Mask name for footprints not deblended, or None")
    catchFailures = pexConfig.Field(
        dtype=bool, default=True,
        doc=("If True, catch exceptions thrown by the deblender, log them, "
             "and set a flag on the parent, instead of letting them propagate up"))

    # Other options
    columnInheritance = pexConfig.DictField(
        keytype=str, itemtype=str, default={
            "deblend_nChild": "deblend_parentNChild",
            "deblend_nPeaks": "deblend_parentNPeaks",
            "deblend_spectrumInitFlag": "deblend_spectrumInitFlag",
            "deblend_blendConvergenceFailedFlag": "deblend_blendConvergenceFailedFlag",
        },
        doc="Columns to pass from the parent to the child. "
            "The key is the name of the column for the parent record, "
            "the value is the name of the column to use for the child."
    )
    pseudoColumns = pexConfig.ListField(
        dtype=str, default=['merge_peak_sky', 'sky_source'],
        doc="Names of flags which should never be deblended."
    )

    # Logging option(s)
    loggingInterval = pexConfig.Field(
        dtype=int, default=600,
        doc="Interval (in seconds) to log messages (at VERBOSE level) while deblending sources.",
        deprecated="This field is no longer used and will be removed in v25.",
    )
    # Testing options
    # Some obs packages and ci packages run the full pipeline on a small
    # subset of data to test that the pipeline is functioning properly.
    # This is not meant as scientific validation, so it can be useful
    # to only run on a small subset of the data that is large enough to
    # test the desired pipeline features but not so long that the deblender
    # is the tall pole in terms of execution times.
    useCiLimits = pexConfig.Field(
        dtype=bool, default=False,
        doc="Limit the number of sources deblended for CI to prevent long build times")
    ciDeblendChildRange = pexConfig.ListField(
        dtype=int, default=[5, 10],
        doc="Only deblend parent Footprints with a number of peaks in the (inclusive) range indicated. "
            "If `useCiLimits==False` then this parameter is ignored.")
    ciNumParentsToDeblend = pexConfig.Field(
        dtype=int, default=10,
        doc="Only use the first `ciNumParentsToDeblend` parent footprints with a total peak count "
            "within `ciDeblendChildRange`. "
            "If `useCiLimits==False` then this parameter is ignored.")

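# A sketch of overriding a few of the fields above in a pipeline config
# file (the values below are hypothetical and shown only to illustrate
# the field names):
#
#     config.version = "lite"
#     config.optimizer = "adaprox"
#     config.morphImage = "wavelet"
#     config.maxIter = 200
#     config.maskLimits = {"NO_DATA": 0.25}
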

class ScarletDeblendTask(pipeBase.Task):
    """ScarletDeblendTask

    Split blended sources into individual sources.

    This task modifies the input SourceCatalog in-place and returns
    catalogs of the deblended models in each band.
    """
    ConfigClass = ScarletDeblendConfig
    _DefaultName = "scarletDeblend"

    def __init__(self, schema, peakSchema=None, **kwargs):
        """Create the task, adding necessary fields to the given schema.

        Parameters
        ----------
        schema : `lsst.afw.table.schema.schema.Schema`
            Schema object for measurement fields; will be modified in-place.
        peakSchema : `lsst.afw.table.schema.schema.Schema`
            Schema of Footprint Peaks that will be passed to the deblender.
            Any fields beyond the PeakTable minimal schema will be transferred
            to the main source Schema. If None, no fields will be transferred
            from the Peaks.
        **kwargs
            Passed to Task.__init__.
        """
        pipeBase.Task.__init__(self, **kwargs)

        peakMinimalSchema = afwDet.PeakTable.makeMinimalSchema()
        if peakSchema is None:
            # In this case, the peakSchemaMapper will transfer nothing, but
            # we'll still have one to simplify downstream code
            self.peakSchemaMapper = afwTable.SchemaMapper(peakMinimalSchema, schema)
        else:
            self.peakSchemaMapper = afwTable.SchemaMapper(peakSchema, schema)
            for item in peakSchema:
                if item.key not in peakMinimalSchema:
                    self.peakSchemaMapper.addMapping(item.key, item.field)
                    # Because SchemaMapper makes a copy of the output schema
                    # you give its ctor, it isn't updating this Schema in
                    # place. That's probably a design flaw, but in the
                    # meantime, we'll keep that schema in sync with the
                    # peakSchemaMapper.getOutputSchema() manually, by adding
                    # the same fields to both.
                    schema.addField(item.field)
            assert schema == self.peakSchemaMapper.getOutputSchema(), "Logic bug mapping schemas"
        self._addSchemaKeys(schema)
        self.schema = schema
        self.toCopyFromParent = [item.key for item in self.schema
                                 if item.field.getName().startswith("merge_footprint")]

    def _addSchemaKeys(self, schema):
        """Add deblender specific keys to the schema
        """
        self.runtimeKey = schema.addField('deblend_runtime', type=np.float32, doc='runtime in ms')

        self.iterKey = schema.addField('deblend_iterations', type=np.int32, doc='iterations to converge')

        self.nChildKey = schema.addField('deblend_nChild', type=np.int32,
                                         doc='Number of children this object has (defaults to 0)')
        self.psfKey = schema.addField('deblend_deblendedAsPsf', type='Flag',
                                      doc='Deblender thought this source looked like a PSF')
        self.tooManyPeaksKey = schema.addField('deblend_tooManyPeaks', type='Flag',
                                               doc='Source had too many peaks; '
                                                   'only the brightest were included')
        self.tooBigKey = schema.addField('deblend_parentTooBig', type='Flag',
                                         doc='Parent footprint covered too many pixels')
        self.maskedKey = schema.addField('deblend_masked', type='Flag',
                                         doc='Parent footprint was predominantly masked')
        self.sedNotConvergedKey = schema.addField('deblend_sedConvergenceFailed', type='Flag',
                                                  doc='scarlet sed optimization did not converge before '
                                                      'config.maxIter')
        self.morphNotConvergedKey = schema.addField('deblend_morphConvergenceFailed', type='Flag',
                                                    doc='scarlet morph optimization did not converge before '
                                                        'config.maxIter')
        self.blendConvergenceFailedFlagKey = schema.addField('deblend_blendConvergenceFailedFlag',
                                                             type='Flag',
                                                             doc='at least one source in the blend '
                                                                 'failed to converge')
        self.edgePixelsKey = schema.addField('deblend_edgePixels', type='Flag',
                                             doc='Source had flux on the edge of the parent footprint')
        self.deblendFailedKey = schema.addField('deblend_failed', type='Flag',
                                                doc="Deblending failed on source")
        self.deblendErrorKey = schema.addField('deblend_error', type="String", size=25,
                                               doc='Name of error if the blend failed')
        self.deblendSkippedKey = schema.addField('deblend_skipped', type='Flag',
                                                 doc="Deblender skipped this source")
        self.peakCenter = afwTable.Point2IKey.addFields(schema, name="deblend_peak_center",
                                                        doc="Center used to apply constraints in scarlet",
                                                        unit="pixel")
        self.peakIdKey = schema.addField("deblend_peakId", type=np.int32,
                                         doc="ID of the peak in the parent footprint. "
                                             "This is not unique, but the combination of 'parent' "
                                             "and 'peakId' should be unique for all child sources. "
                                             "Top level blends with no parents have 'peakId=0'")
        self.modelCenterFlux = schema.addField('deblend_peak_instFlux', type=float, units='count',
                                               doc="The instFlux at the peak position of the deblended "
                                                   "model")
        self.modelTypeKey = schema.addField("deblend_modelType", type="String", size=25,
                                            doc="The type of model used, for example "
                                                "MultiExtendedSource, SingleExtendedSource, PointSource")
        self.nPeaksKey = schema.addField("deblend_nPeaks", type=np.int32,
                                         doc="Number of initial peaks in the blend. "
                                             "This includes peaks that may have been culled "
                                             "during deblending or failed to deblend")
        self.parentNPeaksKey = schema.addField("deblend_parentNPeaks", type=np.int32,
                                               doc="deblend_nPeaks from this record's parent.")
        self.parentNChildKey = schema.addField("deblend_parentNChild", type=np.int32,
                                               doc="deblend_nChild from this record's parent.")
        self.scarletFluxKey = schema.addField("deblend_scarletFlux", type=np.float32,
                                              doc="Flux measurement from scarlet")
        self.scarletLogLKey = schema.addField("deblend_logL", type=np.float32,
                                              doc="Final logL, used to identify regressions in scarlet.")
        self.scarletSpectrumInitKey = schema.addField("deblend_spectrumInitFlag", type='Flag',
                                                      doc="True when scarlet initializes sources "
                                                          "in the blend with a more accurate spectrum. "
                                                          "The algorithm uses a lot of memory, "
                                                          "so large dense blends will use "
                                                          "a less accurate initialization.")

        # self.log.trace('Added keys to schema: %s', ", ".join(str(x) for x in
        #                (self.nChildKey, self.tooManyPeaksKey, self.tooBigKey))
        #                )

    @timeMethod
    def run(self, mExposure, mergedSources):
        """Get the psf from each exposure and then run deblend().

        Parameters
        ----------
        mExposure : `MultibandExposure`
            The exposures should be co-added images of the same
            shape and region of the sky.
        mergedSources : `SourceCatalog`
            The merged `SourceCatalog` that contains parent footprints
            to (potentially) deblend.

        Returns
        -------
        templateCatalogs : `dict`
            Keys are the names of the filters and the values are
            `lsst.afw.table.source.source.SourceCatalog`'s.
            These are catalogs with heavy footprints that are the templates
            created by the multiband deblender.
        weightedCatalogs : `dict`
            Catalogs of the same form as `templateCatalogs`, built from
            the flux re-distributed models. Only populated when
            `config.version` is "lite".
        """
        return self.deblend(mExposure, mergedSources)

    @timeMethod
    def deblend(self, mExposure, catalog):
        """Deblend a data cube of multiband images

        Parameters
        ----------
        mExposure : `MultibandExposure`
            The exposures should be co-added images of the same
            shape and region of the sky.
        catalog : `SourceCatalog`
            The merged `SourceCatalog` that contains parent footprints
            to (potentially) deblend. The new deblended sources are
            appended to this catalog in place.

        Returns
        -------
        catalogs : `dict`
            Keys are the names of the filters and the values are
            `lsst.afw.table.source.source.SourceCatalog`'s.
            These are catalogs with heavy footprints that are the templates
            created by the multiband deblender.
        weightedCatalogs : `dict`
            Catalogs of the same form as `catalogs`, built from the flux
            re-distributed models. Only populated when `config.version`
            is "lite".
        """
        import time

        # Cull footprints if required by ci
        if self.config.useCiLimits:
            self.log.info("Using CI catalog limits, the original number of sources to deblend was %d.",
                          len(catalog))
            # Select parents with a number of children in the range
            # config.ciDeblendChildRange
            minChildren, maxChildren = self.config.ciDeblendChildRange
            nPeaks = np.array([len(src.getFootprint().peaks) for src in catalog])
            childrenInRange = np.where((nPeaks >= minChildren) & (nPeaks <= maxChildren))[0]
            if len(childrenInRange) < self.config.ciNumParentsToDeblend:
                raise ValueError("Fewer than ciNumParentsToDeblend children were contained in the range "
                                 "indicated by ciDeblendChildRange. Adjust this range to include more "
                                 "parents.")
            # Keep all of the isolated parents and the first
            # `ciNumParentsToDeblend` children
            parents = nPeaks == 1
            children = np.zeros((len(catalog),), dtype=bool)
            children[childrenInRange[:self.config.ciNumParentsToDeblend]] = True
            catalog = catalog[parents | children]
            # We need to update the IdFactory, otherwise the source ids
            # will not be sequential
            idFactory = catalog.getIdFactory()
            maxId = np.max(catalog["id"])
            idFactory.notify(maxId)

        filters = mExposure.filters
        self.log.info("Deblending %d sources in %d exposure bands", len(catalog), len(mExposure))
        periodicLog = PeriodicLogger(self.log)

        # Create a set of wavelet coefficients if using wavelet initialization
        if self.config.version == "lite" and self.config.morphImage == "wavelet":
            images = mExposure.image.array
            variance = mExposure.variance.array
            wavelets = get_detect_wavelets(images, variance, scales=self.config.waveletScales)
        else:
            wavelets = None

        # Add the NOT_DEBLENDED mask to the mask plane in each band
        if self.config.notDeblendedMask:
            for mask in mExposure.mask:
                mask.addMaskPlane(self.config.notDeblendedMask)

        nParents = len(catalog)
        nDeblendedParents = 0
        skippedParents = []
        multibandColumns = {
            "heavies": [],
            "fluxes": [],
            "centerFluxes": [],
        }
        weightedColumns = {
            "heavies": [],
            "fluxes": [],
            "centerFluxes": [],
        }
        for parentIndex in range(nParents):
            parent = catalog[parentIndex]
            foot = parent.getFootprint()
            bbox = foot.getBBox()
            peaks = foot.getPeaks()

            # Since we use the first peak for the parent object, we should
            # propagate its flags to the parent source.
            parent.assign(peaks[0], self.peakSchemaMapper)

            # Skip isolated sources unless processSingles is turned on.
            # Note: this does not flag isolated sources as skipped or
            # set the NOT_DEBLENDED mask in the exposure,
            # since these aren't really skipped blends.
            # We also skip pseudo sources, like sky objects, which
            # are intended to be skipped.
            if ((len(peaks) < 2 and not self.config.processSingles)
                    or isPseudoSource(parent, self.config.pseudoColumns)):
                self._updateParentRecord(
                    parent=parent,
                    nPeaks=len(peaks),
                    nChild=0,
                    runtime=np.nan,
                    iterations=0,
                    logL=np.nan,
                    spectrumInit=False,
                    converged=False,
                )
                continue

            # Block of conditions for skipping a parent with multiple children
            skipKey = None
            if self._isLargeFootprint(foot):
                # The footprint is above the maximum footprint size limit
                skipKey = self.tooBigKey
                skipMessage = f"Parent {parent.getId()}: skipping large footprint"
            elif self._isMasked(foot, mExposure):
                # The footprint exceeds the maximum number of masked pixels
                skipKey = self.maskedKey
                skipMessage = f"Parent {parent.getId()}: skipping masked footprint"
            elif self.config.maxNumberOfPeaks > 0 and len(peaks) > self.config.maxNumberOfPeaks:
                # Unlike meas_deblender, in scarlet we skip the entire blend
                # if the number of peaks exceeds max peaks, since neglecting
                # to model any peaks often results in catastrophic failure
                # of scarlet to generate models for the brighter sources.
                skipKey = self.tooManyPeaksKey
                skipMessage = f"Parent {parent.getId()}: Too many peaks, skipping blend"
            if skipKey is not None:
                self._skipParent(
                    parent=parent,
                    skipKey=skipKey,
                    logMessage=skipMessage,
                )
                skippedParents.append(parentIndex)
                continue

            nDeblendedParents += 1
            self.log.trace("Parent %d: deblending %d peaks", parent.getId(), len(peaks))
            # Run the deblender
            blendError = None
            try:
                t0 = time.monotonic()
                # Build the parameter lists with the same ordering
                if self.config.version == "scarlet":
                    blend, skipped, spectrumInit = deblend(mExposure, foot, self.config)
                elif self.config.version == "lite":
                    blend, skipped, spectrumInit = deblend_lite(mExposure, foot, self.config, wavelets)
                tf = time.monotonic()
                runtime = (tf-t0)*1000
                converged = _checkBlendConvergence(blend, self.config.relativeError)

                scarletSources = [src for src in blend.sources]
                nChild = len(scarletSources)
            # Catch all errors and filter out the ones that we know about
            except Exception as e:
                blendError = type(e).__name__
                if isinstance(e, ScarletGradientError):
                    parent.set(self.iterKey, e.iterations)
                elif not isinstance(e, IncompleteDataError):
                    blendError = "UnknownError"
                    if self.config.catchFailures:
                        # Make it easy to find UnknownErrors in the log file
                        self.log.warning("UnknownError")
                        import traceback
                        traceback.print_exc()
                    else:
                        raise

                self._skipParent(
                    parent=parent,
                    skipKey=self.deblendFailedKey,
                    logMessage=f"Unable to deblend source {parent.getId()}: {blendError}",
                )
                parent.set(self.deblendErrorKey, blendError)
                skippedParents.append(parentIndex)
                continue

            # Update the parent record with the deblending results
            if self.config.version == "scarlet":
                logL = -blend.loss[-1] + blend.observations[0].log_norm
            elif self.config.version == "lite":
                logL = blend.loss[-1]
            self._updateParentRecord(
                parent=parent,
                nPeaks=len(peaks),
                nChild=nChild,
                runtime=runtime,
                iterations=len(blend.loss),
                logL=logL,
                spectrumInit=spectrumInit,
                converged=converged,
            )

            # Add each deblended source to the catalog
            for k, scarletSource in enumerate(scarletSources):
                # Skip any sources with no flux or that scarlet skipped because
                # it could not initialize
                if k in skipped or (self.config.version == "lite" and scarletSource.is_null):
                    # No need to propagate anything
                    continue
                parent.set(self.deblendSkippedKey, False)
                if self.config.version == "lite":
                    mHeavy = liteModelToHeavy(scarletSource, mExposure, blend, xy0=bbox.getMin())
                    weightedHeavy = liteModelToHeavy(
                        scarletSource, mExposure, blend, xy0=bbox.getMin(), useFlux=True)
                    weightedColumns["heavies"].append(weightedHeavy)
                    flux = scarletSource.get_model(use_flux=True).sum(axis=(1, 2))
                    weightedColumns["fluxes"].append({
                        filters[fidx]: _flux
                        for fidx, _flux in enumerate(flux)
                    })
                    centerFlux = self._getCenterFlux(weightedHeavy, scarletSource, xy0=bbox.getMin())
                    weightedColumns["centerFluxes"].append(centerFlux)
                else:
                    mHeavy = modelToHeavy(scarletSource, mExposure, blend, xy0=bbox.getMin())
                multibandColumns["heavies"].append(mHeavy)
                flux = scarlet.measure.flux(scarletSource)
                multibandColumns["fluxes"].append({
                    filters[fidx]: _flux
                    for fidx, _flux in enumerate(flux)
                })
                centerFlux = self._getCenterFlux(mHeavy, scarletSource, xy0=bbox.getMin())
                multibandColumns["centerFluxes"].append(centerFlux)

                # Add all fields except the HeavyFootprint to the
                # source record
                self._addChild(
                    parent=parent,
                    mHeavy=mHeavy,
                    catalog=catalog,
                    scarletSource=scarletSource,
                )

            # Log a message if it has been a while since the last log.
            periodicLog.log("Deblended %d parent sources out of %d", parentIndex + 1, nParents)

        # Clear the cached values in scarlet to clear out memory
        scarlet.cache.Cache._cache = {}

        # Make sure that the number of new sources matches the number of
        # entries in each of the band dependent columns.
        # This should never trigger and is just a sanity check.
        nChildren = len(catalog) - nParents
        if np.any([len(meas) != nChildren for meas in multibandColumns.values()]):
            msg = f"Added {len(catalog)-nParents} new sources, but have "
            msg += ", ".join([
                f"{len(value)} {key}"
                for key, value in multibandColumns.items()
            ])
            raise RuntimeError(msg)
        # Make a copy of the catalog in each band and update the footprints
        catalogs = {}
        for f in filters:
            _catalog = afwTable.SourceCatalog(catalog.table.clone())
            _catalog.extend(catalog, deep=True)

            # Update the footprints and columns that are different
            # for each filter
            for sourceIndex, source in enumerate(_catalog[nParents:]):
                source.setFootprint(multibandColumns["heavies"][sourceIndex][f])
                source.set(self.scarletFluxKey, multibandColumns["fluxes"][sourceIndex][f])
                source.set(self.modelCenterFlux, multibandColumns["centerFluxes"][sourceIndex][f])
            catalogs[f] = _catalog

        weightedCatalogs = {}
        if self.config.version == "lite":
            # Also create a catalog by reweighting the flux
            for f in filters:
                _catalog = afwTable.SourceCatalog(catalog.table.clone())
                _catalog.extend(catalog, deep=True)

                # Update the footprints and columns that are different
                # for each filter
                for sourceIndex, source in enumerate(_catalog[nParents:]):
                    source.setFootprint(weightedColumns["heavies"][sourceIndex][f])
                    source.set(self.scarletFluxKey, weightedColumns["fluxes"][sourceIndex][f])
                    source.set(self.modelCenterFlux, weightedColumns["centerFluxes"][sourceIndex][f])
                weightedCatalogs[f] = _catalog

        # Update the mExposure mask with the footprint of skipped parents
        if self.config.notDeblendedMask:
            for mask in mExposure.mask:
                for parentIndex in skippedParents:
                    fp = catalog[parentIndex].getFootprint()
                    fp.spans.setMask(mask, mask.getPlaneBitMask(self.config.notDeblendedMask))

        self.log.info("Deblender results: of %d parent sources, %d were deblended, "
                      "creating %d children, for a total of %d sources",
                      nParents, nDeblendedParents, nChildren, len(catalog))
        return catalogs, weightedCatalogs

    def _isLargeFootprint(self, footprint):
        """Returns whether a Footprint is large

        'Large' is defined by thresholds on the area, size and axis ratio.
        These may be disabled independently by configuring them to be
        non-positive.

        This is principally intended to get rid of satellite streaks, which the
        deblender or other downstream processing can have trouble dealing with
        (e.g., multiple large HeavyFootprints can chew up memory).
        """
        if self.config.maxFootprintArea > 0 and footprint.getArea() > self.config.maxFootprintArea:
            return True
        if self.config.maxFootprintSize > 0:
            bbox = footprint.getBBox()
            if max(bbox.getWidth(), bbox.getHeight()) > self.config.maxFootprintSize:
                return True
        if self.config.minFootprintAxisRatio > 0:
            axes = afwEll.Axes(footprint.getShape())
            if axes.getB() < self.config.minFootprintAxisRatio*axes.getA():
                return True
        return False

    def _isMasked(self, footprint, mExposure):
        """Returns whether the footprint violates the mask limits"""
        bbox = footprint.getBBox()
        mask = np.bitwise_or.reduce(mExposure.mask[:, bbox].array, axis=0)
        size = float(footprint.getArea())
        for maskName, limit in self.config.maskLimits.items():
            maskVal = mExposure.mask.getPlaneBitMask(maskName)
            _mask = afwImage.MaskX(mask & maskVal, xy0=bbox.getMin())
            unmaskedSpan = footprint.spans.intersectNot(_mask)  # spanset of unmasked pixels
            if (size - unmaskedSpan.getArea())/size > limit:
                return True
        return False
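    # A worked sketch of the `maskLimits` check above, assuming a
    # hypothetical config with ``maskLimits = {"NO_DATA": 0.25}``:
    # a footprint of 1000 pixels with 300 NO_DATA pixels has
    # 700 unmasked pixels, so its masked fraction is
    # (1000 - 700) / 1000 = 0.3 > 0.25 and the parent is skipped.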

    def _skipParent(self, parent, skipKey, logMessage):
        """Update a parent record that is not being deblended.

        This is a fairly trivial function but is implemented to ensure
        that a skipped parent updates the appropriate columns
        consistently, and always has a flag to mark the reason that
        it is being skipped.

        Parameters
        ----------
        parent : `lsst.afw.table.source.source.SourceRecord`
            The parent record to flag as skipped.
        skipKey : `lsst.afw.table.Key`
            The schema key of the flag that marks the reason for skipping.
        logMessage : `str`
            The message to display in a log.trace when a source
            is skipped.
        """
        if logMessage is not None:
            self.log.trace(logMessage)
        self._updateParentRecord(
            parent=parent,
            nPeaks=len(parent.getFootprint().peaks),
            nChild=0,
            runtime=np.nan,
            iterations=0,
            logL=np.nan,
            spectrumInit=False,
            converged=False,
        )

        # Mark the source as skipped by the deblender and
        # flag the reason why.
        parent.set(self.deblendSkippedKey, True)
        parent.set(skipKey, True)

    def _updateParentRecord(self, parent, nPeaks, nChild,
                            runtime, iterations, logL, spectrumInit, converged):
        """Update a parent record in all of the single band catalogs.

        Ensure that all locations that update a parent record,
        whether it is skipped or updated after deblending,
        update all of the appropriate columns.

        Parameters
        ----------
        parent : `lsst.afw.table.source.source.SourceRecord`
            The parent record to update.
        nPeaks : `int`
            Number of peaks in the parent footprint.
        nChild : `int`
            Number of children deblended from the parent.
            This may differ from `nPeaks` if some of the peaks
            were culled and have no deblended model.
        runtime : `float`
            Total runtime for deblending.
        iterations : `int`
            Total number of iterations in scarlet before convergence.
        logL : `float`
            Final log likelihood of the blend.
        spectrumInit : `bool`
            True when scarlet used `set_spectra` to initialize all
            sources with better initial intensities.
        converged : `bool`
            True when the optimizer reached convergence before
            reaching the maximum number of iterations.
        """
        parent.set(self.nPeaksKey, nPeaks)
        parent.set(self.nChildKey, nChild)
        parent.set(self.runtimeKey, runtime)
        parent.set(self.iterKey, iterations)
        parent.set(self.scarletLogLKey, logL)
        parent.set(self.scarletSpectrumInitKey, spectrumInit)
        # The flag records convergence *failure*, so invert `converged`.
        parent.set(self.blendConvergenceFailedFlagKey, not converged)

    def _addChild(self, parent, mHeavy, catalog, scarletSource):
        """Add a child to a catalog.

        This creates a new child in the source catalog,
        assigning it a parent id, and adding all columns
        that are independent across all filter bands.

        Parameters
        ----------
        parent : `lsst.afw.table.source.source.SourceRecord`
            The parent of the new child record.
        mHeavy : `lsst.detection.MultibandFootprint`
            The multi-band footprint containing the model and
            peak catalog for the new child record.
        catalog : `lsst.afw.table.source.source.SourceCatalog`
            The merged `SourceCatalog` that contains parent footprints
            to (potentially) deblend.
        scarletSource : `scarlet.Component`
            The scarlet model for the new source record.
        """
        src = catalog.addNew()
        for key in self.toCopyFromParent:
            src.set(key, parent.get(key))
        # The peak catalog is the same for all bands,
        # so we just use the first peak catalog
        peaks = mHeavy[mHeavy.filters[0]].peaks
        src.assign(peaks[0], self.peakSchemaMapper)
        src.setParent(parent.getId())
        # Currently all children only have a single peak,
        # but it's possible in the future that there will be hierarchical
        # deblending, so we use the footprint to set the number of peaks
        # for each child.
        src.set(self.nPeaksKey, len(peaks))
        # Set the psf key based on whether or not the source was
        # deblended using the PointSource model.
        # This key is not that useful anymore since we now keep track of
        # `modelType`, but we continue to propagate it in case code downstream
        # is expecting it.
        src.set(self.psfKey, scarletSource.__class__.__name__ == "PointSource")
        src.set(self.modelTypeKey, scarletSource.__class__.__name__)
        # We set the runtime to zero so that summing up the
        # runtime column will give the total time spent
        # running the deblender for the catalog.
        src.set(self.runtimeKey, 0)

        # Set the position of the peak from the parent footprint.
        # This will make it easier to match the same source across
        # deblenders and across observations, where the peak
        # position is unlikely to change unless enough time passes
        # for a source to move on the sky.
        peak = scarletSource.detectedPeak
        src.set(self.peakCenter, Point2I(peak["i_x"], peak["i_y"]))
        src.set(self.peakIdKey, peak["id"])

        # Propagate columns from the parent to the child
        for parentColumn, childColumn in self.config.columnInheritance.items():
            src.set(childColumn, parent.get(parentColumn))

    def _getCenterFlux(self, mHeavy, scarletSource, xy0):
        """Get the flux at the center of a HeavyFootprint

        Parameters
        ----------
        mHeavy : `lsst.detection.MultibandFootprint`
            The multi-band footprint containing the model for the source.
        scarletSource : `scarlet.Component`
            The scarlet model for the heavy footprint.
        xy0 : `lsst.geom.Point2I`
            The lower-left corner of the parent bounding box, used to
            convert the model frame coordinates of the source center
            into image coordinates.
        """
        # Store the flux at the center of the model and the total
        # scarlet flux measurement.
        mImage = mHeavy.getImage(fill=0.0).image

        # Set the flux at the center of the model (for SNR)
        try:
            cy, cx = scarletSource.center
            cy += xy0.y
            cx += xy0.x
            return mImage[:, cx, cy]
        except AttributeError:
            msg = "Did not recognize coordinates for source type of `{0}`, "
            msg += "could not write coordinates or center flux. "
            msg += "Add `{0}` to meas_extensions_scarlet to properly persist this information."
            logger.warning(msg.format(type(scarletSource)))
            return {f: np.nan for f in mImage.filters}
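

# A minimal end-to-end sketch (hypothetical driver code, not part of this
# module), assuming a `MultibandExposure` and a merged detection catalog:
#
#     schema = afwTable.SourceTable.makeMinimalSchema()
#     task = ScarletDeblendTask(schema=schema)
#     templateCatalogs, weightedCatalogs = task.run(mExposure, mergedSources)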