Coverage for python/lsst/meas/extensions/scarlet/scarletDeblendTask.py: 15% (472 statements)


# This file is part of meas_extensions_scarlet.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

from functools import partial
import logging
import numpy as np
import scarlet
from scarlet.psf import ImagePSF, GaussianPSF
from scarlet import Blend, Frame, Observation
from scarlet.renderer import ConvolutionRenderer
from scarlet.detect import get_detect_wavelets
from scarlet.initialization import init_all_sources
from scarlet import lite

import lsst.pex.config as pexConfig
from lsst.pex.exceptions import InvalidParameterError
import lsst.pipe.base as pipeBase
from lsst.geom import Point2I, Box2I, Point2D
import lsst.afw.geom.ellipses as afwEll
import lsst.afw.image as afwImage
import lsst.afw.detection as afwDet
import lsst.afw.table as afwTable
from lsst.utils.logging import PeriodicLogger
from lsst.utils.timer import timeMethod

from .source import bboxToScarletBox, modelToHeavy, liteModelToHeavy

# Scarlet and proxmin have a different definition of log levels than the stack,
# so even "warnings" occur far more often than we would like.
# So for now we only display scarlet and proxmin errors, as all other
# scarlet outputs would be considered "TRACE" by our standards.
scarletLogger = logging.getLogger("scarlet")
scarletLogger.setLevel(logging.ERROR)
proxminLogger = logging.getLogger("proxmin")
proxminLogger.setLevel(logging.ERROR)

__all__ = ["deblend", "ScarletDeblendConfig", "ScarletDeblendTask"]

logger = logging.getLogger(__name__)


class IncompleteDataError(Exception):
    """The PSF could not be computed due to incomplete data
    """
    pass


class ScarletGradientError(Exception):
    """An error occurred during optimization

    This error occurs when the optimizer encounters
    a NaN value while calculating the gradient.
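
    Examples
    --------
    A minimal sketch showing the error message built below:

    >>> str(ScarletGradientError(10, [1, 3]))
    'ScarletGradientError in iteration 10. NaN values introduced in sources [1, 3]'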

71 """ 

72 def __init__(self, iterations, sources): 

73 self.iterations = iterations 

74 self.sources = sources 

75 msg = ("ScalarGradientError in iteration {0}. " 

76 "NaN values introduced in sources {1}") 

77 self.message = msg.format(iterations, sources) 

78 

79 def __str__(self): 

80 return self.message 

81 

82 

83def _checkBlendConvergence(blend, f_rel): 

84 """Check whether or not a blend has converged 

85 """ 

86 deltaLoss = np.abs(blend.loss[-2] - blend.loss[-1]) 

87 convergence = f_rel * np.abs(blend.loss[-1]) 

88 return deltaLoss < convergence 

89 

def _computePsfImage(self, position):
    """Get a multiband PSF image.

    The PSF kernel image is computed for each band
    and combined into a (filter, y, x) array.
    The result is not cached, so if the same PSF is expected
    to be used multiple times it is a good idea to store the
    result in another variable.

    Note: this is a temporary fix during the deblender sprint.
    In the future this function will replace the current method
    in `afw.MultibandExposure.computePsfImage` (DM-19789), which is
    why its first argument is named ``self``.

    Parameters
    ----------
    self : `lsst.afw.image.MultibandExposure`
        The multiband exposure in which to evaluate the PSF.
    position : `Point2D` or `tuple`
        Coordinates to evaluate the PSF.

    Returns
    -------
    psfImage : array
        The multiband PSF image.
    """

    psfs = []
    # Make the coordinates into a Point2D (if necessary)
    if not isinstance(position, Point2D):
        position = Point2D(position[0], position[1])

    for bidx, single in enumerate(self.singles):
        try:
            psf = single.getPsf().computeKernelImage(position)
            psfs.append(psf)
        except InvalidParameterError as e:
            # This band failed to compute the PSF due to incomplete data
            # at that location. This is unlikely to be a problem for Rubin,
            # however the edges of some HSC COSMOS fields contain incomplete
            # data in some bands, so we track this error to distinguish it
            # from unknown errors.
            msg = "Failed to compute PSF at {} in band {}"
            raise IncompleteDataError(msg.format(position, self.filters[bidx])) from e

    left = np.min([psf.getBBox().getMinX() for psf in psfs])
    bottom = np.min([psf.getBBox().getMinY() for psf in psfs])
    right = np.max([psf.getBBox().getMaxX() for psf in psfs])
    top = np.max([psf.getBBox().getMaxY() for psf in psfs])
    bbox = Box2I(Point2I(left, bottom), Point2I(right, top))
    psfs = [afwImage.utils.projectImage(psf, bbox) for psf in psfs]
    psfImage = afwImage.MultibandImage.fromImages(self.filters, psfs)
    return psfImage


def getFootprintMask(footprint, mExposure):
    """Mask pixels outside the footprint

    Parameters
    ----------
    footprint : `lsst.afw.detection.Footprint`
        The footprint of the parent to deblend
    mExposure : `lsst.afw.image.MultibandExposure`
        The multiband exposure containing the image,
        mask, and variance data

    Returns
    -------
    footprintMask : array
        Boolean array with pixels not in the footprint set to one.
    """
    bbox = footprint.getBBox()
    fpMask = afwImage.Mask(bbox)
    footprint.spans.setMask(fpMask, 1)
    fpMask = ~fpMask.getArray().astype(bool)
    return fpMask


def isPseudoSource(source, pseudoColumns):
    """Check if a source is a pseudo source.

    This is mostly for skipping sky objects,
    but any other column can also be added to disable
    deblending on a parent or individual source when
    set to `True`.

    Parameters
    ----------
    source : `lsst.afw.table.SourceRecord`
        The source to check for the pseudo bit.
    pseudoColumns : `list` of `str`
        A list of columns to check for pseudo sources.
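
    Examples
    --------
    A minimal sketch using a plain mapping in place of a ``SourceRecord``;
    both raise `KeyError` for a missing column, which is all this function
    relies on:

    >>> isPseudoSource({"sky_source": True}, ["sky_source", "merge_peak_sky"])
    True
    >>> isPseudoSource({"merge_peak_sky": False}, ["merge_peak_sky", "sky_source"])
    False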

176 """ 

177 isPseudo = False 

178 for col in pseudoColumns: 

179 try: 

180 isPseudo |= source[col] 

181 except KeyError: 

182 pass 

183 return isPseudo 


def deblend(mExposure, footprint, config):
    """Deblend a parent footprint

    Parameters
    ----------
    mExposure : `lsst.afw.image.MultibandExposure`
        The multiband exposure containing the image,
        mask, and variance data
    footprint : `lsst.afw.detection.Footprint`
        The footprint of the parent to deblend
    config : `ScarletDeblendConfig`
        Configuration of the deblending task

    Returns
    -------
    blend : `scarlet.Blend`
        The scarlet blend class that contains all of the information
        about the parameters and results from scarlet
    skipped : `list` of `int`
        The indices of any children that failed to initialize
        and were skipped.
    spectrumInit : `bool`
        Whether or not all of the sources were initialized by jointly
        fitting their SEDs. This provides a better initialization,
        but can create memory issues when a blend is too large or
        contains too many sources.
    """

    # Extract coordinates from each MultiColorPeak
    bbox = footprint.getBBox()

    # Create the data array from the masked images
    images = mExposure.image[:, bbox].array

    # Use the inverse variance as the weights
    if config.useWeights:
        weights = 1/mExposure.variance[:, bbox].array
    else:
        weights = np.ones_like(images)
    badPixels = mExposure.mask.getPlaneBitMask(config.badMask)
    mask = mExposure.mask[:, bbox].array & badPixels
    weights[mask > 0] = 0

    # Mask out the pixels outside the footprint
    mask = getFootprintMask(footprint, mExposure)
    weights *= ~mask

    psfs = _computePsfImage(mExposure, footprint.getCentroid()).array.astype(np.float32)
    psfs = ImagePSF(psfs)
    model_psf = GaussianPSF(sigma=(config.modelPsfSigma,)*len(mExposure.filters))

    frame = Frame(images.shape, psf=model_psf, channels=mExposure.filters)
    observation = Observation(images, psf=psfs, weights=weights, channels=mExposure.filters)
    if config.convolutionType == "fft":
        observation.match(frame)
    elif config.convolutionType == "real":
        renderer = ConvolutionRenderer(observation, frame, convolution_type="real")
        observation.match(frame, renderer=renderer)
    else:
        raise ValueError("Unrecognized convolution type {}".format(config.convolutionType))

    assert config.sourceModel in ["single", "double", "compact", "point", "fit"]

    # Set the appropriate number of components
    if config.sourceModel == "single":
        maxComponents = 1
    elif config.sourceModel == "double":
        maxComponents = 2
    elif config.sourceModel == "compact":
        maxComponents = 0
    elif config.sourceModel == "point":
        raise NotImplementedError("Point source photometry is currently not implemented")
    elif config.sourceModel == "fit":
        # It is likely in the future that there will be some heuristic
        # used to determine what type of model to use for each source,
        # but that has not yet been implemented (see DM-22551)
        raise NotImplementedError("sourceModel 'fit' has not been implemented yet")

    # Convert the centers to pixel coordinates
    xmin = bbox.getMinX()
    ymin = bbox.getMinY()
    centers = [
        np.array([peak.getIy() - ymin, peak.getIx() - xmin], dtype=int)
        for peak in footprint.peaks
        if not isPseudoSource(peak, config.pseudoColumns)
    ]

    # Choose whether or not to use the improved spectral initialization
    if config.setSpectra:
        if config.maxSpectrumCutoff <= 0:
            spectrumInit = True
        else:
            spectrumInit = len(centers) * bbox.getArea() < config.maxSpectrumCutoff
    else:
        spectrumInit = False

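    # For example (illustrative numbers only): with the default
    # maxSpectrumCutoff of 1_000_000, a blend with 10 peaks in a
    # 200x200 pixel bounding box gives 10 * 40_000 = 400_000 < 1_000_000,
    # so the joint spectrum initialization above would be used.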

    # Only deblend sources that can be initialized
    sources, skipped = init_all_sources(
        frame=frame,
        centers=centers,
        observations=observation,
        thresh=config.morphThresh,
        max_components=maxComponents,
        min_snr=config.minSNR,
        shifting=False,
        fallback=config.fallback,
        silent=config.catchFailures,
        set_spectra=spectrumInit,
    )

    # Attach the peak to all of the initialized sources
    srcIndex = 0
    for k, center in enumerate(centers):
        if k not in skipped:
            # This is just to make sure that there isn't a coding bug
            assert np.all(sources[srcIndex].center == center)
            # Store the record for the peak with the appropriate source
            sources[srcIndex].detectedPeak = footprint.peaks[k]
            srcIndex += 1

    # Create the blend and attempt to optimize it
    blend = Blend(sources, observation)
    try:
        blend.fit(max_iter=config.maxIter, e_rel=config.relativeError)
    except ArithmeticError:
        # This occurs when a gradient update produces a NaN value
        # This is usually due to a source initialized with a
        # negative SED or no flux, often because the peak
        # is a noise fluctuation in one band and not a real source.
        iterations = len(blend.loss)
        failedSources = []
        for k, src in enumerate(sources):
            if np.any(~np.isfinite(src.get_model())):
                failedSources.append(k)
        raise ScarletGradientError(iterations, failedSources)

    return blend, skipped, spectrumInit


def deblend_lite(mExposure, footprint, config, wavelets=None):
    """Deblend a parent footprint

    Parameters
    ----------
    mExposure : `lsst.afw.image.MultibandExposure`
        The multiband exposure containing the image,
        mask, and variance data
    footprint : `lsst.afw.detection.Footprint`
        The footprint of the parent to deblend
    config : `ScarletDeblendConfig`
        Configuration of the deblending task
    wavelets : array, optional
        Pre-computed wavelet coefficients for the full image,
        used when ``config.morphImage == "wavelet"``.

    Returns
    -------
    blend : `scarlet.lite.LiteBlend`
        The scarlet blend class that contains all of the information
        about the parameters and results from scarlet
    skipped : `list`
        The (null) sources that failed to initialize and were skipped.
    spectrumInit : `bool`
        Whether or not all of the sources were initialized by jointly
        fitting their SEDs.
    """
    # Extract coordinates from each MultiColorPeak
    bbox = footprint.getBBox()

    # Create the data array from the masked images
    images = mExposure.image[:, bbox].array
    variance = mExposure.variance[:, bbox].array

    # Use the inverse variance as the weights
    if config.useWeights:
        weights = 1/mExposure.variance[:, bbox].array
    else:
        weights = np.ones_like(images)
    badPixels = mExposure.mask.getPlaneBitMask(config.badMask)
    mask = mExposure.mask[:, bbox].array & badPixels
    weights[mask > 0] = 0

    # Mask out the pixels outside the footprint
    mask = getFootprintMask(footprint, mExposure)
    weights *= ~mask

    psfs = _computePsfImage(mExposure, footprint.getCentroid()).array.astype(np.float32)
    modelPsf = lite.integrated_circular_gaussian(sigma=config.modelPsfSigma)

    observation = lite.LiteObservation(
        images=images,
        variance=variance,
        weights=weights,
        psfs=psfs,
        model_psf=modelPsf[None, :, :],
        convolution_mode=config.convolutionType,
    )

    # Convert the centers to pixel coordinates
    xmin = bbox.getMinX()
    ymin = bbox.getMinY()
    centers = [
        np.array([peak.getIy() - ymin, peak.getIx() - xmin], dtype=int)
        for peak in footprint.peaks
        if not isPseudoSource(peak, config.pseudoColumns)
    ]

    # Initialize the sources
    if config.morphImage == "chi2":
        sources = lite.init_all_sources_main(
            observation,
            centers,
            min_snr=config.minSNR,
            thresh=config.morphThresh,
        )
    elif config.morphImage == "wavelet":
        _bbox = bboxToScarletBox(len(mExposure.filters), bbox, bbox.getMin())
        _wavelets = wavelets[(slice(None), *_bbox[1:].slices)]
        sources = lite.init_all_sources_wavelets(
            observation,
            centers,
            use_psf=False,
            wavelets=_wavelets,
            min_snr=config.minSNR,
        )
    else:
        raise ValueError("morphImage must be either 'chi2' or 'wavelet'.")

    # Set the optimizer
    if config.optimizer == "adaprox":
        parameterization = partial(
            lite.init_adaprox_component,
            bg_thresh=config.backgroundThresh,
            max_prox_iter=config.maxProxIter,
        )
    elif config.optimizer == "fista":
        parameterization = partial(
            lite.init_fista_component,
            bg_thresh=config.backgroundThresh,
        )
    else:
        raise ValueError("Unrecognized optimizer. Must be either 'adaprox' or 'fista'.")
    sources = lite.parameterize_sources(sources, observation, parameterization)

    # Attach the peak to all of the initialized sources
    for k, center in enumerate(centers):
        # This is just to make sure that there isn't a coding bug
        if len(sources[k].components) > 0 and np.any(sources[k].center != center):
            raise ValueError(f"Misaligned center, expected {center} but got {sources[k].center}")
        # Store the record for the peak with the appropriate source
        sources[k].detectedPeak = footprint.peaks[k]

    blend = lite.LiteBlend(sources, observation)

    # Initialize each source with its best fit spectrum.
    # This significantly cuts down on the number of iterations
    # that the optimizer needs and usually results in a better
    # fit, but using least squares on a very large blend causes memory issues.
    # This is typically the most expensive operation in deblending, memory-wise.
    spectrumInit = False
    if config.setSpectra:
        if config.maxSpectrumCutoff <= 0 or len(centers) * bbox.getArea() < config.maxSpectrumCutoff:
            spectrumInit = True
            blend.fit_spectra()

    # Set the sources that could not be initialized and were skipped
    skipped = [src for src in sources if src.is_null]

    blend.fit(max_iter=config.maxIter, e_rel=config.relativeError, min_iter=config.minIter)

    return blend, skipped, spectrumInit


class ScarletDeblendConfig(pexConfig.Config):
    """ScarletDeblendConfig

    Configuration for the multiband deblender.
    The parameters are organized by the parameter types, which are
    - Stopping Criteria: Used to determine if the fit has converged
    - Position Fitting Criteria: Used to fit the positions of the peaks
    - Constraints: Used to apply constraints to the peaks and their components
    - Other: Parameters that don't fit into the above categories
    """
    # Stopping Criteria
    minIter = pexConfig.Field(dtype=int, default=1,
                              doc="Minimum number of iterations before the optimizer is allowed to stop.")
    maxIter = pexConfig.Field(dtype=int, default=300,
                              doc="Maximum number of iterations to deblend a single parent")
    relativeError = pexConfig.Field(dtype=float, default=1e-2,
                                    doc="Change in the loss function between iterations to exit fitter. "
                                        "Typically this is `1e-2` if measurements will be made on the "
                                        "flux re-distributed models and `1e-4` when making measurements "
                                        "on the models themselves.")

    # Constraints
    morphThresh = pexConfig.Field(dtype=float, default=1,
                                  doc="Fraction of background RMS a pixel must have "
                                      "to be included in the initial morphology")
    # Lite Parameters
    # All of these parameters (except version) are only valid if version='lite'
    version = pexConfig.ChoiceField(
        dtype=str,
        default="lite",
        allowed={
            "scarlet": "main scarlet version (likely to be deprecated soon)",
            "lite": "Optimized version of scarlet for survey data from a single instrument",
        },
        doc="The version of scarlet to use.",
    )
    optimizer = pexConfig.ChoiceField(
        dtype=str,
        default="adaprox",
        allowed={
            "adaprox": "Proximal ADAM optimization",
            "fista": "Accelerated proximal gradient method",
        },
        doc="The optimizer to use for fitting parameters; only used when version='lite'",
    )
    morphImage = pexConfig.ChoiceField(
        dtype=str,
        default="chi2",
        allowed={
            "chi2": "Initialize sources on a chi^2 image made from all available bands",
            "wavelet": "Initialize sources using a wavelet decomposition of the chi^2 image",
        },
        doc="The type of image to use for initializing the morphology. "
            "Must be either 'chi2' or 'wavelet'."
    )
    backgroundThresh = pexConfig.Field(
        dtype=float,
        default=0.25,
        doc="Fraction of background to use for a sparsity threshold. "
            "This prevents sources from growing unrealistically outside "
            "the parent footprint while still modeling flux correctly "
            "for bright sources."
    )
    maxProxIter = pexConfig.Field(
        dtype=int,
        default=1,
        doc="Maximum number of proximal operator iterations inside of each "
            "iteration of the optimizer. "
            "This config field is only used if version='lite' and optimizer='adaprox'."
    )
    waveletScales = pexConfig.Field(
        dtype=int,
        default=5,
        doc="Number of wavelet scales to use for wavelet initialization. "
            "This field is only used when `version`='lite' and `morphImage`='wavelet'."
    )

    # Other scarlet parameters
    useWeights = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to use inverse variance weighting. "
            "If `useWeights` is `False` then flat weights are used")
    modelPsfSize = pexConfig.Field(
        dtype=int, default=11,
        doc="Model PSF side length in pixels")
    modelPsfSigma = pexConfig.Field(
        dtype=float, default=0.8,
        doc="Define sigma for the model frame PSF")
    minSNR = pexConfig.Field(
        dtype=float, default=50,
        doc="Minimum signal to noise to accept the source. "
            "Sources with lower flux will be initialized with the PSF but updated "
            "like an ordinary ExtendedSource (known in scarlet as a `CompactSource`).")
    saveTemplates = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to save the SEDs and templates")
    processSingles = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to process isolated sources in the deblender")
    convolutionType = pexConfig.Field(
        dtype=str, default="fft",
        doc="Type of convolution to render the model to the observations.\n"
            "- 'fft': perform convolutions in Fourier space\n"
            "- 'real': perform convolutions in real space.")
    sourceModel = pexConfig.Field(
        dtype=str, default="double",
        doc="How to determine which model to use for sources, from\n"
            "- 'single': use a single component for all sources\n"
            "- 'double': use a bulge disk model for all sources\n"
            "- 'compact': use a single component model, initialized with a point source morphology, "
            "  for all sources\n"
            "- 'point': use a point-source model for all sources\n"
            "- 'fit': use a PSF fitting model to determine the number of components (not yet implemented)",
        deprecated="This field will be deprecated when the default for `version` is changed to `lite`.",
    )
    setSpectra = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to solve for the best-fit spectra during initialization. "
            "This makes initialization slightly longer, as it requires a convolution "
            "to set the optimal spectra, but results in a much better initial log-likelihood "
            "and reduced total runtime, with convergence in fewer iterations. "
            "This option is only used when peaks*area < `maxSpectrumCutoff`.")

    # Mask-plane restrictions
    badMask = pexConfig.ListField(
        dtype=str, default=["BAD", "CR", "NO_DATA", "SAT", "SUSPECT", "EDGE"],
        doc="Mask planes used to reject bad pixels (they are given zero weight) during deblending")
    statsMask = pexConfig.ListField(dtype=str, default=["SAT", "INTRP", "NO_DATA"],
                                    doc="Mask planes to ignore when performing statistics")
    maskLimits = pexConfig.DictField(
        keytype=str,
        itemtype=float,
        default={},
        doc="Mask planes with the corresponding limit on the fraction of masked pixels. "
            "Sources violating this limit will not be deblended.",
    )

    # Size restrictions
    maxNumberOfPeaks = pexConfig.Field(
        dtype=int, default=200,
        doc="Only deblend the brightest maxNumberOfPeaks peaks in the parent "
            "(<= 0: unlimited)")
    maxFootprintArea = pexConfig.Field(
        dtype=int, default=100_000,
        doc="Maximum area for footprints before they are ignored as large; "
            "non-positive means no threshold applied")
    maxAreaTimesPeaks = pexConfig.Field(
        dtype=int, default=10_000_000,
        doc="Maximum rectangular footprint area * nPeaks in the footprint. "
            "This was introduced in DM-33690 to prevent fields that are crowded or have a "
            "LSB galaxy that causes memory intensive initialization in scarlet from dominating "
            "the overall runtime and/or causing the task to run out of memory. "
            "(<= 0: unlimited)"
    )
    maxFootprintSize = pexConfig.Field(
        dtype=int, default=0,
        doc="Maximum linear dimension for footprints before they are ignored "
            "as large; non-positive means no threshold applied")
    minFootprintAxisRatio = pexConfig.Field(
        dtype=float, default=0.0,
        doc="Minimum axis ratio for footprints before they are ignored "
            "as large; non-positive means no threshold applied")
    maxSpectrumCutoff = pexConfig.Field(
        dtype=int, default=1_000_000,
        doc="Maximum number of pixels * number of sources in a blend. "
            "This is different than `maxFootprintArea` because this isn't "
            "the footprint area but the area of the bounding box that "
            "contains the footprint, and is also multiplied by the number of "
            "sources in the footprint. This prevents large skinny blends with "
            "a high density of sources from running out of memory. "
            "If `maxSpectrumCutoff == -1` then there is no cutoff."
    )

    # Failure modes
    fallback = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to fallback to a smaller number of components if a source does not initialize"
    )
    notDeblendedMask = pexConfig.Field(
        dtype=str, default="NOT_DEBLENDED", optional=True,
        doc="Mask name for footprints not deblended, or None")
    catchFailures = pexConfig.Field(
        dtype=bool, default=True,
        doc="If True, catch exceptions thrown by the deblender, log them, "
            "and set a flag on the parent, instead of letting them propagate up")

    # Other options
    columnInheritance = pexConfig.DictField(
        keytype=str, itemtype=str, default={
            "deblend_nChild": "deblend_parentNChild",
            "deblend_nPeaks": "deblend_parentNPeaks",
            "deblend_spectrumInitFlag": "deblend_spectrumInitFlag",
            "deblend_blendConvergenceFailedFlag": "deblend_blendConvergenceFailedFlag",
        },
        doc="Columns to pass from the parent to the child. "
            "The key is the name of the column for the parent record, "
            "the value is the name of the column to use for the child."
    )
    pseudoColumns = pexConfig.ListField(
        dtype=str, default=['merge_peak_sky', 'sky_source'],
        doc="Names of flag columns that mark pseudo sources, which are never deblended."
    )

    # Logging option(s)
    loggingInterval = pexConfig.Field(
        dtype=int, default=600,
        doc="Interval (in seconds) to log messages (at VERBOSE level) while deblending sources.",
        deprecated="This field is no longer used and will be removed in v25.",
    )
    # Testing options
    # Some obs packages and ci packages run the full pipeline on a small
    # subset of data to test that the pipeline is functioning properly.
    # This is not meant as scientific validation, so it can be useful
    # to only run on a small subset of the data that is large enough to
    # test the desired pipeline features but not so long that the deblender
    # is the tall pole in terms of execution times.
    useCiLimits = pexConfig.Field(
        dtype=bool, default=False,
        doc="Limit the number of sources deblended for CI to prevent long build times")
    ciDeblendChildRange = pexConfig.ListField(
        dtype=int, default=[5, 10],
        doc="Only deblend parent Footprints with a number of peaks in the (inclusive) range indicated. "
            "If `useCiLimits==False` then this parameter is ignored.")
    ciNumParentsToDeblend = pexConfig.Field(
        dtype=int, default=10,
        doc="Only use the first `ciNumParentsToDeblend` parent footprints with a total peak count "
            "within `ciDeblendChildRange`. "
            "If `useCiLimits==False` then this parameter is ignored.")


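# A minimal configuration sketch (the field names are the ones defined
# above; the override values are illustrative, not recommended defaults):
#
#     config = ScarletDeblendConfig()
#     config.version = "lite"
#     config.optimizer = "adaprox"
#     config.morphImage = "wavelet"
#     config.maskLimits = {"NO_DATA": 0.5}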

class ScarletDeblendTask(pipeBase.Task):
    """ScarletDeblendTask

    Split blended sources into individual sources.

    The input SourceCatalog is modified in place, and per-band
    catalogs containing the deblended models are returned by `run`.
    """
    ConfigClass = ScarletDeblendConfig
    _DefaultName = "scarletDeblend"

    def __init__(self, schema, peakSchema=None, **kwargs):
        """Create the task, adding necessary fields to the given schema.

        Parameters
        ----------
        schema : `lsst.afw.table.Schema`
            Schema object for measurement fields; will be modified in-place.
        peakSchema : `lsst.afw.table.Schema`
            Schema of Footprint Peaks that will be passed to the deblender.
            Any fields beyond the PeakTable minimal schema will be transferred
            to the main source Schema. If None, no fields will be transferred
            from the Peaks.
        **kwargs
            Passed to Task.__init__.
        """
        pipeBase.Task.__init__(self, **kwargs)

        peakMinimalSchema = afwDet.PeakTable.makeMinimalSchema()
        if peakSchema is None:
            # In this case, the peakSchemaMapper will transfer nothing, but
            # we'll still have one to simplify downstream code
            self.peakSchemaMapper = afwTable.SchemaMapper(peakMinimalSchema, schema)
        else:
            self.peakSchemaMapper = afwTable.SchemaMapper(peakSchema, schema)
            for item in peakSchema:
                if item.key not in peakMinimalSchema:
                    self.peakSchemaMapper.addMapping(item.key, item.field)
                    # Because SchemaMapper makes a copy of the output schema
                    # you give its ctor, it isn't updating this Schema in
                    # place. That's probably a design flaw, but in the
                    # meantime, we'll keep that schema in sync with the
                    # peakSchemaMapper.getOutputSchema() manually, by adding
                    # the same fields to both.
                    schema.addField(item.field)
            assert schema == self.peakSchemaMapper.getOutputSchema(), "Logic bug mapping schemas"
        self._addSchemaKeys(schema)
        self.schema = schema
        self.toCopyFromParent = [item.key for item in self.schema
                                 if item.field.getName().startswith("merge_footprint")]

    def _addSchemaKeys(self, schema):
        """Add deblender specific keys to the schema
        """
        self.runtimeKey = schema.addField('deblend_runtime', type=np.float32, doc='runtime in ms')

        self.iterKey = schema.addField('deblend_iterations', type=np.int32, doc='iterations to converge')

        self.nChildKey = schema.addField('deblend_nChild', type=np.int32,
                                         doc='Number of children this object has (defaults to 0)')
        self.psfKey = schema.addField('deblend_deblendedAsPsf', type='Flag',
                                      doc='Deblender thought this source looked like a PSF')
        self.tooManyPeaksKey = schema.addField('deblend_tooManyPeaks', type='Flag',
                                               doc='Source had too many peaks; '
                                                   'only the brightest were included')
        self.tooBigKey = schema.addField('deblend_parentTooBig', type='Flag',
                                         doc='Parent footprint covered too many pixels')
        self.maskedKey = schema.addField('deblend_masked', type='Flag',
                                         doc='Parent footprint was predominantly masked')
        self.sedNotConvergedKey = schema.addField('deblend_sedConvergenceFailed', type='Flag',
                                                  doc='scarlet sed optimization did not converge before '
                                                      'config.maxIter')
        self.morphNotConvergedKey = schema.addField('deblend_morphConvergenceFailed', type='Flag',
                                                    doc='scarlet morph optimization did not converge before '
                                                        'config.maxIter')
        self.blendConvergenceFailedFlagKey = schema.addField('deblend_blendConvergenceFailedFlag',
                                                             type='Flag',
                                                             doc='at least one source in the blend '
                                                                 'failed to converge')
        self.edgePixelsKey = schema.addField('deblend_edgePixels', type='Flag',
                                             doc='Source had flux on the edge of the parent footprint')
        self.deblendFailedKey = schema.addField('deblend_failed', type='Flag',
                                                doc="Deblending failed on source")
        self.deblendErrorKey = schema.addField('deblend_error', type="String", size=25,
                                               doc='Name of error if the blend failed')
        self.deblendSkippedKey = schema.addField('deblend_skipped', type='Flag',
                                                 doc="Deblender skipped this source")
        self.peakCenter = afwTable.Point2IKey.addFields(schema, name="deblend_peak_center",
                                                        doc="Center used to apply constraints in scarlet",
                                                        unit="pixel")
        self.peakIdKey = schema.addField("deblend_peakId", type=np.int32,
                                         doc="ID of the peak in the parent footprint. "
                                             "This is not unique, but the combination of 'parent' "
                                             "and 'peakId' should be for all child sources. "
                                             "Top level blends with no parents have 'peakId=0'")
        self.modelCenterFlux = schema.addField('deblend_peak_instFlux', type=float, units='count',
                                               doc="The instFlux at the peak position of the deblended model")
        self.modelTypeKey = schema.addField("deblend_modelType", type="String", size=25,
                                            doc="The type of model used, for example "
                                                "MultiExtendedSource, SingleExtendedSource, PointSource")
        self.nPeaksKey = schema.addField("deblend_nPeaks", type=np.int32,
                                         doc="Number of initial peaks in the blend. "
                                             "This includes peaks that may have been culled "
                                             "during deblending or failed to deblend")
        self.parentNPeaksKey = schema.addField("deblend_parentNPeaks", type=np.int32,
                                               doc="deblend_nPeaks from this record's parent.")
        self.parentNChildKey = schema.addField("deblend_parentNChild", type=np.int32,
                                               doc="deblend_nChild from this record's parent.")
        self.scarletFluxKey = schema.addField("deblend_scarletFlux", type=np.float32,
                                              doc="Flux measurement from scarlet")
        self.scarletLogLKey = schema.addField("deblend_logL", type=np.float32,
                                              doc="Final logL, used to identify regressions in scarlet.")
        self.scarletSpectrumInitKey = schema.addField("deblend_spectrumInitFlag", type='Flag',
                                                      doc="True when scarlet initializes sources "
                                                          "in the blend with a more accurate spectrum. "
                                                          "The algorithm uses a lot of memory, "
                                                          "so large dense blends will use "
                                                          "a less accurate initialization.")

        # self.log.trace('Added keys to schema: %s', ", ".join(str(x) for x in
        #                (self.nChildKey, self.tooManyPeaksKey, self.tooBigKey))
        #                )

    @timeMethod
    def run(self, mExposure, mergedSources):
        """Get the psf from each exposure and then run deblend().

        Parameters
        ----------
        mExposure : `MultibandExposure`
            The exposures should be co-added images of the same
            shape and region of the sky.
        mergedSources : `SourceCatalog`
            The merged `SourceCatalog` that contains parent footprints
            to (potentially) deblend.

        Returns
        -------
        templateCatalogs : `dict`
            Keys are the names of the filters and the values are
            `lsst.afw.table.SourceCatalog`s.
            These are catalogs with heavy footprints that are the templates
            created by the multiband templates.
        weightedCatalogs : `dict`
            Keys are the names of the filters and the values are catalogs
            whose heavy footprints contain the flux-weighted models.
            Only populated when ``config.version == "lite"``;
            otherwise an empty `dict`.
        """
        return self.deblend(mExposure, mergedSources)

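    # A minimal usage sketch (assumed calling pattern based on the
    # signatures in this module; constructing the schema, the multiband
    # exposure, and the merged catalog follows standard lsst.afw and
    # lsst.pipe code and is not shown):
    #
    #     task = ScarletDeblendTask(schema=schema)
    #     templateCatalogs, weightedCatalogs = task.run(mExposure, mergedSources)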

    @timeMethod
    def deblend(self, mExposure, catalog):
        """Deblend a data cube of multiband images

        Parameters
        ----------
        mExposure : `MultibandExposure`
            The exposures should be co-added images of the same
            shape and region of the sky.
        catalog : `SourceCatalog`
            The merged `SourceCatalog` that contains parent footprints
            to (potentially) deblend. The new deblended sources are
            appended to this catalog in place.

        Returns
        -------
        catalogs : `dict`
            Keys are the names of the filters and the values are
            `lsst.afw.table.SourceCatalog`s.
            These are catalogs with heavy footprints that are the templates
            created by the multiband templates.
        weightedCatalogs : `dict`
            Keys are the names of the filters and the values are catalogs
            whose heavy footprints contain the flux-weighted models.
            Only populated when ``config.version == "lite"``.
        """
        import time

        # Cull footprints if required by ci
        if self.config.useCiLimits:
            self.log.info("Using CI catalog limits, the original number of sources to deblend was %d.",
                          len(catalog))
            # Select parents with a number of children in the range
            # config.ciDeblendChildRange
            minChildren, maxChildren = self.config.ciDeblendChildRange
            nPeaks = np.array([len(src.getFootprint().peaks) for src in catalog])
            childrenInRange = np.where((nPeaks >= minChildren) & (nPeaks <= maxChildren))[0]
            if len(childrenInRange) < self.config.ciNumParentsToDeblend:
                raise ValueError("Fewer than ciNumParentsToDeblend children were contained in the range "
                                 "indicated by ciDeblendChildRange. Adjust this range to include more "
                                 "parents.")
            # Keep all of the isolated parents and the first
            # `ciNumParentsToDeblend` children
            parents = nPeaks == 1
            children = np.zeros((len(catalog),), dtype=bool)
            children[childrenInRange[:self.config.ciNumParentsToDeblend]] = True
            catalog = catalog[parents | children]
            # We need to update the IdFactory, otherwise the source ids
            # will not be sequential
            idFactory = catalog.getIdFactory()
            maxId = np.max(catalog["id"])
            idFactory.notify(maxId)

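            # For example (illustrative, default config): with
            # ciDeblendChildRange=[5, 10] the culled catalog keeps every
            # isolated parent (nPeaks == 1) plus the first
            # ciNumParentsToDeblend=10 parents with 5-10 peaks (inclusive).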

        filters = mExposure.filters
        self.log.info("Deblending %d sources in %d exposure bands", len(catalog), len(mExposure))
        periodicLog = PeriodicLogger(self.log)

        # Create a set of wavelet coefficients if using wavelet initialization
        if self.config.version == "lite" and self.config.morphImage == "wavelet":
            images = mExposure.image.array
            variance = mExposure.variance.array
            wavelets = get_detect_wavelets(images, variance, scales=self.config.waveletScales)
        else:
            wavelets = None

        # Add the NOT_DEBLENDED mask to the mask plane in each band
        if self.config.notDeblendedMask:
            for mask in mExposure.mask:
                mask.addMaskPlane(self.config.notDeblendedMask)

        nParents = len(catalog)
        nDeblendedParents = 0
        skippedParents = []
        multibandColumns = {
            "heavies": [],
            "fluxes": [],
            "centerFluxes": [],
        }
        weightedColumns = {
            "heavies": [],
            "fluxes": [],
            "centerFluxes": [],
        }
        for parentIndex in range(nParents):
            parent = catalog[parentIndex]
            foot = parent.getFootprint()
            bbox = foot.getBBox()
            peaks = foot.getPeaks()

            # Since we use the first peak for the parent object, we should
            # propagate its flags to the parent source.
            parent.assign(peaks[0], self.peakSchemaMapper)

            # Skip isolated sources unless processSingles is turned on.
            # Note: this does not flag isolated sources as skipped or
            # set the NOT_DEBLENDED mask in the exposure,
            # since these aren't really skipped blends.
            # We also skip pseudo sources, like sky objects, which
            # are intended to be skipped.
            if ((len(peaks) < 2 and not self.config.processSingles)
                    or isPseudoSource(parent, self.config.pseudoColumns)):
                self._updateParentRecord(
                    parent=parent,
                    nPeaks=len(peaks),
                    nChild=0,
                    runtime=np.nan,
                    iterations=0,
                    logL=np.nan,
                    spectrumInit=False,
                    converged=False,
                )
                continue

            # Block of conditions for skipping a parent with multiple children
            skipKey = None
            if self._isLargeFootprint(foot):
                # The footprint is above the maximum footprint size limit
                skipKey = self.tooBigKey
                skipMessage = f"Parent {parent.getId()}: skipping large footprint"
            elif self._isMasked(foot, mExposure):
                # The footprint exceeds the maximum number of masked pixels
                skipKey = self.maskedKey
                skipMessage = f"Parent {parent.getId()}: skipping masked footprint"
            elif self.config.maxNumberOfPeaks > 0 and len(peaks) > self.config.maxNumberOfPeaks:
                # Unlike meas_deblender, in scarlet we skip the entire blend
                # if the number of peaks exceeds max peaks, since neglecting
                # to model any peaks often results in catastrophic failure
                # of scarlet to generate models for the brighter sources.
                skipKey = self.tooManyPeaksKey
                skipMessage = f"Parent {parent.getId()}: Too many peaks, skipping blend"
            if skipKey is not None:
                self._skipParent(
                    parent=parent,
                    skipKey=skipKey,
                    logMessage=skipMessage,
                )
                skippedParents.append(parentIndex)
                continue

            nDeblendedParents += 1
            self.log.trace("Parent %d: deblending %d peaks", parent.getId(), len(peaks))
            # Run the deblender
            blendError = None
            try:
                t0 = time.monotonic()
                # Build the parameter lists with the same ordering
                if self.config.version == "scarlet":
                    blend, skipped, spectrumInit = deblend(mExposure, foot, self.config)
                elif self.config.version == "lite":
                    blend, skipped, spectrumInit = deblend_lite(mExposure, foot, self.config, wavelets)
                tf = time.monotonic()
                runtime = (tf-t0)*1000
                converged = _checkBlendConvergence(blend, self.config.relativeError)

                scarletSources = [src for src in blend.sources]
                nChild = len(scarletSources)
            # Catch all errors and filter out the ones that we know about
            except Exception as e:
                blendError = type(e).__name__
                if isinstance(e, ScarletGradientError):
                    parent.set(self.iterKey, e.iterations)
                elif not isinstance(e, IncompleteDataError):
                    blendError = "UnknownError"
                    if self.config.catchFailures:
                        # Make it easy to find UnknownErrors in the log file
                        self.log.warning("UnknownError")
                        import traceback
                        traceback.print_exc()
                    else:
                        raise

                self._skipParent(
                    parent=parent,
                    skipKey=self.deblendFailedKey,
                    logMessage=f"Unable to deblend source {parent.getId()}: {blendError}",
                )
                parent.set(self.deblendErrorKey, blendError)
                skippedParents.append(parentIndex)
                continue

            # Update the parent record with the deblending results
            if self.config.version == "scarlet":
                logL = -blend.loss[-1] + blend.observations[0].log_norm
            elif self.config.version == "lite":
                logL = blend.loss[-1]
            self._updateParentRecord(
                parent=parent,
                nPeaks=len(peaks),
                nChild=nChild,
                runtime=runtime,
                iterations=len(blend.loss),
                logL=logL,
                spectrumInit=spectrumInit,
                converged=converged,
            )

            # Add each deblended source to the catalog
            for k, scarletSource in enumerate(scarletSources):
                # Skip any sources with no flux or that scarlet skipped because
                # it could not initialize
                if k in skipped or (self.config.version == "lite" and scarletSource.is_null):
                    # No need to propagate anything
                    continue
                parent.set(self.deblendSkippedKey, False)
                if self.config.version == "lite":
                    mHeavy = liteModelToHeavy(scarletSource, mExposure, blend, xy0=bbox.getMin())
                    weightedHeavy = liteModelToHeavy(
                        scarletSource, mExposure, blend, xy0=bbox.getMin(), useFlux=True)
                    weightedColumns["heavies"].append(weightedHeavy)
                    flux = scarletSource.get_model(use_flux=True).sum(axis=(1, 2))
                    weightedColumns["fluxes"].append({
                        filters[fidx]: _flux
                        for fidx, _flux in enumerate(flux)
                    })
                    centerFlux = self._getCenterFlux(weightedHeavy, scarletSource, xy0=bbox.getMin())
                    weightedColumns["centerFluxes"].append(centerFlux)
                else:
                    mHeavy = modelToHeavy(scarletSource, mExposure, blend, xy0=bbox.getMin())
                multibandColumns["heavies"].append(mHeavy)
                flux = scarlet.measure.flux(scarletSource)
                multibandColumns["fluxes"].append({
                    filters[fidx]: _flux
                    for fidx, _flux in enumerate(flux)
                })
                centerFlux = self._getCenterFlux(mHeavy, scarletSource, xy0=bbox.getMin())
                multibandColumns["centerFluxes"].append(centerFlux)

                # Add all fields except the HeavyFootprint to the
                # source record
                self._addChild(
                    parent=parent,
                    mHeavy=mHeavy,
                    catalog=catalog,
                    scarletSource=scarletSource,
                )

            # Log a message if it has been a while since the last log.
            periodicLog.log("Deblended %d parent sources out of %d", parentIndex + 1, nParents)

        # Clear the cached values in scarlet to clear out memory
        scarlet.cache.Cache._cache = {}

        # Make sure that the number of new sources matches the number of
        # entries in each of the band dependent columns.
        # This should never trigger and is just a sanity check.
        nChildren = len(catalog) - nParents
        if np.any([len(meas) != nChildren for meas in multibandColumns.values()]):
            msg = f"Added {len(catalog)-nParents} new sources, but have "
            msg += ", ".join([
                f"{len(value)} {key}"
                for key, value in multibandColumns.items()
            ])
            raise RuntimeError(msg)
        # Make a copy of the catalog in each band and update the footprints
        catalogs = {}
        for f in filters:
            _catalog = afwTable.SourceCatalog(catalog.table.clone())
            _catalog.extend(catalog, deep=True)

            # Update the footprints and columns that are different
            # for each filter
            for sourceIndex, source in enumerate(_catalog[nParents:]):
                source.setFootprint(multibandColumns["heavies"][sourceIndex][f])
                source.set(self.scarletFluxKey, multibandColumns["fluxes"][sourceIndex][f])
                source.set(self.modelCenterFlux, multibandColumns["centerFluxes"][sourceIndex][f])
            catalogs[f] = _catalog

        weightedCatalogs = {}
        if self.config.version == "lite":
            # Also create a catalog by reweighting the flux
            for f in filters:
                _catalog = afwTable.SourceCatalog(catalog.table.clone())
                _catalog.extend(catalog, deep=True)

                # Update the footprints and columns that are different
                # for each filter
                for sourceIndex, source in enumerate(_catalog[nParents:]):
                    source.setFootprint(weightedColumns["heavies"][sourceIndex][f])
                    source.set(self.scarletFluxKey, weightedColumns["fluxes"][sourceIndex][f])
                    source.set(self.modelCenterFlux, weightedColumns["centerFluxes"][sourceIndex][f])
                weightedCatalogs[f] = _catalog

        # Update the mExposure mask with the footprint of skipped parents
        if self.config.notDeblendedMask:
            for mask in mExposure.mask:
                for parentIndex in skippedParents:
                    fp = catalog[parentIndex].getFootprint()
                    fp.spans.setMask(mask, mask.getPlaneBitMask(self.config.notDeblendedMask))

        self.log.info("Deblender results: of %d parent sources, %d were deblended, "
                      "creating %d children, for a total of %d sources",
                      nParents, nDeblendedParents, nChildren, len(catalog))
        return catalogs, weightedCatalogs

    def _isLargeFootprint(self, footprint):
        """Returns whether a Footprint is large

        'Large' is defined by thresholds on the area, size and axis ratio,
        and total area of the bounding box multiplied by
        the number of children.
        These may be disabled independently by configuring them to be
        non-positive.
        """

        if self.config.maxFootprintArea > 0 and footprint.getArea() > self.config.maxFootprintArea:
            return True
        if self.config.maxFootprintSize > 0:
            bbox = footprint.getBBox()
            if max(bbox.getWidth(), bbox.getHeight()) > self.config.maxFootprintSize:
                return True
        if self.config.minFootprintAxisRatio > 0:
            axes = afwEll.Axes(footprint.getShape())
            if axes.getB() < self.config.minFootprintAxisRatio*axes.getA():
                return True
        if self.config.maxAreaTimesPeaks > 0:
            if footprint.getBBox().getArea() * len(footprint.peaks) > self.config.maxAreaTimesPeaks:
                return True
        return False

    def _isMasked(self, footprint, mExposure):
        """Returns whether the footprint violates the mask limits"""

        bbox = footprint.getBBox()
        mask = np.bitwise_or.reduce(mExposure.mask[:, bbox].array, axis=0)
        size = float(footprint.getArea())
        for maskName, limit in self.config.maskLimits.items():
            maskVal = mExposure.mask.getPlaneBitMask(maskName)
            _mask = afwImage.MaskX(mask & maskVal, xy0=bbox.getMin())
            unmaskedSpan = footprint.spans.intersectNot(_mask)  # spanset of unmasked pixels
            if (size - unmaskedSpan.getArea())/size > limit:
                return True
        return False

    def _skipParent(self, parent, skipKey, logMessage):
        """Update a parent record that is not being deblended.

        This is a fairly trivial function but is implemented to ensure
        that a skipped parent updates the appropriate columns
        consistently, and always has a flag to mark the reason that
        it is being skipped.

        Parameters
        ----------
        parent : `lsst.afw.table.SourceRecord`
            The parent record to flag as skipped.
        skipKey : `lsst.afw.table.Key`
            The schema key of the flag that marks the reason for skipping.
        logMessage : `str`
            The message to display in a log.trace when a source
            is skipped.
        """
        if logMessage is not None:
            self.log.trace(logMessage)
        self._updateParentRecord(
            parent=parent,
            nPeaks=len(parent.getFootprint().peaks),
            nChild=0,
            runtime=np.nan,
            iterations=0,
            logL=np.nan,
            spectrumInit=False,
            converged=False,
        )

        # Mark the source as skipped by the deblender and
        # flag the reason why.
        parent.set(self.deblendSkippedKey, True)
        parent.set(skipKey, True)

    def _updateParentRecord(self, parent, nPeaks, nChild,
                            runtime, iterations, logL, spectrumInit, converged):
        """Update a parent record in all of the single band catalogs.

        Ensure that all locations that update a parent record,
        whether it is skipped or updated after deblending,
        update all of the appropriate columns.

        Parameters
        ----------
        parent : `lsst.afw.table.SourceRecord`
            The parent record to update.
        nPeaks : `int`
            Number of peaks in the parent footprint.
        nChild : `int`
            Number of children deblended from the parent.
            This may differ from `nPeaks` if some of the peaks
            were culled and have no deblended model.
        runtime : `float`
            Total runtime for deblending.
        iterations : `int`
            Total number of iterations in scarlet before convergence.
        logL : `float`
            Final log likelihood of the blend.
        spectrumInit : `bool`
            True when scarlet used `set_spectra` to initialize all
            sources with better initial intensities.
        converged : `bool`
            True when the optimizer reached convergence before
            reaching the maximum number of iterations.
        """
        parent.set(self.nPeaksKey, nPeaks)
        parent.set(self.nChildKey, nChild)
        parent.set(self.runtimeKey, runtime)
        parent.set(self.iterKey, iterations)
        parent.set(self.scarletLogLKey, logL)
        parent.set(self.scarletSpectrumInitKey, spectrumInit)
        # The flag records a convergence *failure*, so it is the
        # inverse of the convergence state passed in.
        parent.set(self.blendConvergenceFailedFlagKey, not converged)

    def _addChild(self, parent, mHeavy, catalog, scarletSource):
        """Add a child to a catalog.

        This creates a new child in the source catalog,
        assigning it a parent id, and adding all columns
        that are independent across all filter bands.

        Parameters
        ----------
        parent : `lsst.afw.table.SourceRecord`
            The parent of the new child record.
        mHeavy : `lsst.afw.detection.MultibandFootprint`
            The multi-band footprint containing the model and
            peak catalog for the new child record.
        catalog : `lsst.afw.table.SourceCatalog`
            The merged `SourceCatalog` that contains parent footprints
            to (potentially) deblend.
        scarletSource : `scarlet.Component`
            The scarlet model for the new source record.
        """
        src = catalog.addNew()
        for key in self.toCopyFromParent:
            src.set(key, parent.get(key))
        # The peak catalog is the same for all bands,
        # so we just use the first peak catalog
        peaks = mHeavy[mHeavy.filters[0]].peaks
        src.assign(peaks[0], self.peakSchemaMapper)
        src.setParent(parent.getId())
        # Currently all children only have a single peak,
        # but it's possible in the future that there will be hierarchical
        # deblending, so we use the footprint to set the number of peaks
        # for each child.
        src.set(self.nPeaksKey, len(peaks))
        # Set the psf key based on whether or not the source was
        # deblended using the PointSource model.
        # This key is not that useful anymore since we now keep track of
        # `modelType`, but we continue to propagate it in case code downstream
        # is expecting it.
        src.set(self.psfKey, scarletSource.__class__.__name__ == "PointSource")
        src.set(self.modelTypeKey, scarletSource.__class__.__name__)
        # We set the runtime to zero so that summing up the
        # runtime column will give the total time spent
        # running the deblender for the catalog.
        src.set(self.runtimeKey, 0)

        # Set the position of the peak from the parent footprint.
        # This will make it easier to match the same source across
        # deblenders and across observations, where the peak
        # position is unlikely to change unless enough time passes
        # for a source to move on the sky.
        peak = scarletSource.detectedPeak
        src.set(self.peakCenter, Point2I(peak["i_x"], peak["i_y"]))
        src.set(self.peakIdKey, peak["id"])

        # Propagate columns from the parent to the child
        for parentColumn, childColumn in self.config.columnInheritance.items():
            src.set(childColumn, parent.get(parentColumn))

    def _getCenterFlux(self, mHeavy, scarletSource, xy0):
        """Get the flux at the center of a HeavyFootprint

        Parameters
        ----------
        mHeavy : `lsst.afw.detection.MultibandFootprint`
            The multi-band footprint containing the model for the source.
        scarletSource : `scarlet.Component`
            The scarlet model for the heavy footprint
        xy0 : `lsst.geom.Point2I`
            The origin of the parent bounding box, used to shift the
            model center into the image coordinate system.

        Returns
        -------
        centerFlux : `dict` or multiband pixel slice
            The flux at the center of the model in each band,
            or NaN per band when the model center is not available.
        """
        # Store the flux at the center of the model and the total
        # scarlet flux measurement.
        mImage = mHeavy.getImage(fill=0.0).image

        # Set the flux at the center of the model (for SNR)
        try:
            cy, cx = scarletSource.center
            cy += xy0.y
            cx += xy0.x
            return mImage[:, cx, cy]
        except AttributeError:
            msg = "Did not recognize coordinates for source type of `{0}`, "
            msg += "could not write coordinates or center flux. "
            msg += "Add `{0}` to meas_extensions_scarlet to properly persist this information."
            logger.warning(msg.format(type(scarletSource)))
            return {f: np.nan for f in mImage.filters}