Coverage for python/lsst/meas/extensions/scarlet/scarletDeblendTask.py: 16%

393 statements  

coverage.py v7.4.4, created at 2024-04-19 12:31 +0000

# This file is part of meas_extensions_scarlet.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

from functools import partial
import logging
import numpy as np

import lsst.pex.config as pexConfig
import lsst.pipe.base as pipeBase
import lsst.geom as geom
import lsst.afw.geom.ellipses as afwEll
import lsst.afw.image as afwImage
import lsst.afw.detection as afwDet
import lsst.afw.table as afwTable
import lsst.scarlet.lite as scl
from lsst.utils.logging import PeriodicLogger
from lsst.utils.timer import timeMethod

from . import io
from .utils import bboxToScarletBox, defaultBadPixelMasks, buildObservation, computePsfKernelImage

# Scarlet and proxmin use a different definition of log levels than the stack,
# so even "warnings" occur far more often than we would like.
# For now we only display scarlet and proxmin errors, as all other
# scarlet output would be considered "TRACE" by our standards.
scarletLogger = logging.getLogger("scarlet")
scarletLogger.setLevel(logging.ERROR)
proxminLogger = logging.getLogger("proxmin")
proxminLogger.setLevel(logging.ERROR)

__all__ = ["deblend", "deblend_old_lite", "ScarletDeblendConfig", "ScarletDeblendTask"]

logger = logging.getLogger(__name__)

class ScarletGradientError(Exception):
    """An error occurred during optimization.

    This error occurs when the optimizer encounters
    a NaN value while calculating the gradient.
    """
    def __init__(self, iterations, sources):
        self.iterations = iterations
        self.sources = sources
        msg = ("ScarletGradientError in iteration {0}. "
               "NaN values introduced in sources {1}")
        self.message = msg.format(iterations, sources)

    def __str__(self):
        return self.message

def _checkBlendConvergence(blend, f_rel):
    """Check whether or not a blend has converged.

    The blend is considered converged when the change in the loss
    between the last two iterations is smaller than ``f_rel`` times
    the magnitude of the final loss.
    """
    deltaLoss = np.abs(blend.loss[-2] - blend.loss[-1])
    convergence = f_rel * np.abs(blend.loss[-1])
    return deltaLoss < convergence
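
# A minimal numeric sketch (not part of the pipeline) of the convergence test
# above: with f_rel=1e-2 and a loss history ending in [-100.0, -99.5], the
# change in the loss (0.5) is compared against 1e-2 * 99.5 = 0.995, so the
# blend counts as converged.
#
#     >>> class _FakeBlend:
#     ...     loss = [-100.0, -99.5]
#     >>> bool(_checkBlendConvergence(_FakeBlend(), f_rel=1e-2))
#     True
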

def isPseudoSource(source, pseudoColumns):
    """Check if a source is a pseudo source.

    This is mostly for skipping sky objects,
    but any other column can also be added to disable
    deblending on a parent or individual source when
    set to `True`.

    Parameters
    ----------
    source : `lsst.afw.table.source.source.SourceRecord`
        The source to check for the pseudo bit.
    pseudoColumns : `list` of `str`
        A list of columns to check for pseudo sources.

    Returns
    -------
    isPseudo : `bool`
        `True` if any of the `pseudoColumns` is set on the source.
    """
    isPseudo = False
    for col in pseudoColumns:
        try:
            isPseudo |= source[col]
        except KeyError:
            pass
    return isPseudo
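
# A minimal sketch (not part of the pipeline) of the check above, using a
# plain `dict` in place of an afw SourceRecord; both raise `KeyError` for a
# missing column, which is silently ignored:
#
#     >>> isPseudoSource({"merge_peak_sky": True}, ["merge_peak_sky", "sky_source"])
#     True
#     >>> isPseudoSource({"merge_peak_sky": False}, ["merge_peak_sky", "sky_source"])
#     False
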

def deblend(mExposure, modelPsf, footprint, config, spectrumInit, monotonicity, wavelets=None):
    """Deblend a parent footprint.

    Parameters
    ----------
    mExposure : `lsst.afw.image.MultibandExposure`
        The multiband exposure containing the image,
        mask, and variance data.
    modelPsf : `numpy.ndarray`
        The 2D model of the PSF in the partially deconvolved space.
    footprint : `lsst.afw.detection.Footprint`
        The footprint of the parent to deblend.
    config : `ScarletDeblendConfig`
        Configuration of the deblending task.
    spectrumInit : `bool`
        Whether or not to initialize the model using the spectrum.
    monotonicity : `lsst.scarlet.lite.operators.Monotonicity`
        The monotonicity operator.
    wavelets : `numpy.ndarray`
        Pre-generated wavelets to use if using wavelet initialization.

    Returns
    -------
    blend : `lsst.scarlet.lite.Blend`
        The blend that was fit, containing the deblended sources.
    skippedSources : `list[int]`
        Indices of sources that were skipped due to no flux.
        This usually means that a source was a spurious detection in one
        band that should not have been included in the merged catalog.
    skippedBands : `list[str]`
        Bands that were skipped because a PSF could not be generated for them.
    """
    # Extract coordinates from each MultiColorPeak
    bbox = footprint.getBBox()
    psfCenter = footprint.getCentroid()

    observation = buildObservation(
        modelPsf=modelPsf,
        psfCenter=psfCenter,
        mExposure=mExposure[:, bbox],
        footprint=footprint,
        badPixelMasks=config.badMask,
        useWeights=config.useWeights,
        convolutionType=config.convolutionType,
    )

    # Convert the peaks into an array
    peaks = [
        np.array([peak.getIy(), peak.getIx()], dtype=int)
        for peak in footprint.peaks
        if not isPseudoSource(peak, config.pseudoColumns)
    ]

    # Initialize the sources
    if config.morphImage == "chi2":
        sources = scl.initialization.FactorizedChi2Initialization(
            observation=observation,
            centers=peaks,
            min_snr=config.minSNR,
            monotonicity=monotonicity,
            thresh=config.backgroundThresh,
        ).sources
    elif config.morphImage == "wavelet":
        _bbox = bboxToScarletBox(len(mExposure.filters), bbox, bbox.getMin())
        _wavelets = wavelets[(slice(None), *_bbox[1:].slices)]

        sources = scl.initialization.FactorizedWaveletInitialization(
            observation=observation,
            centers=peaks,
            use_psf=False,
            wavelets=_wavelets,
            monotonicity=monotonicity,
            min_snr=config.minSNR,
            thresh=config.backgroundThresh,
        ).sources
    else:
        raise ValueError("morphImage must be either 'chi2' or 'wavelet'.")

    blend = scl.Blend(sources, observation)

    # Initialize each source with its best fit spectrum
    if spectrumInit:
        blend.fit_spectra()

    # Set the optimizer
    if config.optimizer == "adaprox":
        blend.parameterize(partial(
            scl.component.default_adaprox_parameterization,
            noise_rms=observation.noise_rms/10,
        ))
    elif config.optimizer == "fista":
        blend.parameterize(scl.component.default_fista_parameterization)
    else:
        raise ValueError("Unrecognized optimizer. Must be either 'adaprox' or 'fista'.")

    blend.fit(
        max_iter=config.maxIter,
        e_rel=config.relativeError,
        min_iter=config.minIter,
    )

    # Attach the peak to all of the initialized sources
    for k, center in enumerate(peaks):
        # This is just to make sure that there isn't a coding bug
        if len(sources[k].components) > 0 and np.any(sources[k].center != center):
            raise ValueError(f"Misaligned center, expected {center} but got {sources[k].center}")
        # Store the record for the peak with the appropriate source
        sources[k].detectedPeak = footprint.peaks[k]

    # Record the indices of the sources that could not be initialized
    # and were skipped
    skippedSources = [k for k, src in enumerate(sources) if src.is_null]

    # Store the location of the PSF center for persistence
    blend.psfCenter = (psfCenter.x, psfCenter.y)

    # Calculate the bands that were skipped
    skippedBands = [band for band in mExposure.filters if band not in observation.bands]

    return blend, skippedSources, skippedBands
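
# A sketch of how `deblend` is driven for a single parent; see
# `ScarletDeblendTask.deblend` below for the real call site. `mExposure`
# (a MultibandExposure) and `footprint` (a parent Footprint) are assumed to
# already exist, and the monotonicity operator is built once and reused
# across blends:
#
#     >>> config = ScarletDeblendConfig()
#     >>> modelPsf = scl.utils.integrated_circular_gaussian(sigma=config.modelPsfSigma)
#     >>> monotonicity = scl.operators.Monotonicity((101, 101))
#     >>> blend, skippedSources, skippedBands = deblend(
#     ...     mExposure, modelPsf, footprint, config,
#     ...     spectrumInit=True, monotonicity=monotonicity)
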

def buildOldObservation(
    modelPsf,
    psfCenter,
    mExposure,
    footprint=None,
    badPixelMasks=None,
    useWeights=True,
    convolutionType="real",
):
    """Generate a LiteObservation from a set of parameters.

    Make the generation and reconstruction of a scarlet model consistent
    by building a `LiteObservation` from a set of parameters.

    Parameters
    ----------
    modelPsf : `numpy.ndarray`
        The 2D model of the PSF in the partially deconvolved space.
    psfCenter : `tuple` or `Point2I` or `Point2D`
        The location `(x, y)` used as the center of the PSF.
    mExposure : `lsst.afw.image.multiband.MultibandExposure`
        The multi-band exposure that the model represents.
        If `mExposure` is `None` then no image, variance, or weights are
        attached to the observation.
    footprint : `lsst.afw.detection.Footprint`
        The footprint that is being fit.
        If `footprint` is `None` then the weights are not updated to mask
        out pixels not contained in the footprint.
    badPixelMasks : `list` of `str`
        The keys from the bit mask plane used to mask out pixels
        during the fit.
        If `badPixelMasks` is `None` then the default values from
        `ScarletDeblendConfig.badMask` are used.
    useWeights : `bool`
        Whether or not fitting should use inverse variance weights to
        calculate the log-likelihood.
    convolutionType : `str`
        The type of convolution to use (either "real" or "fft").
        When reconstructing an image it is advised to use "real" to avoid
        polluting the footprint with artifacts from the FFT.

    Returns
    -------
    observation : `scarlet.lite.LiteObservation`
        The observation constructed from the input parameters.
    """
    from scarlet import lite
    # Initialize the observed PSFs
    psfModels, mExposure = computePsfKernelImage(mExposure, psfCenter)

    # Use the inverse variance as the weights
    if useWeights:
        weights = 1/mExposure.variance.array
    else:
        # Mask out bad pixels
        weights = np.ones_like(mExposure.image.array)
        if badPixelMasks is None:
            badPixelMasks = ScarletDeblendConfig().badMask
        badPixels = mExposure.mask.getPlaneBitMask(badPixelMasks)
        mask = mExposure.mask.array & badPixels
        weights[mask > 0] = 0

    if footprint is not None:
        # Mask out the pixels outside the footprint
        weights *= footprint.spans.asArray()

    observation = lite.LiteObservation(
        images=mExposure.image.array,
        variance=mExposure.variance.array,
        weights=weights,
        psfs=psfModels,
        model_psf=modelPsf[None, :, :],
        convolution_mode=convolutionType,
    )

    # Store the bands used to create the observation
    observation.bands = mExposure.filters
    return observation

def deblend_old_lite(mExposure, modelPsf, footprint, config, spectrumInit, wavelets=None):
    """Deblend a parent footprint.

    Parameters
    ----------
    mExposure : `lsst.afw.image.MultibandExposure`
        The multiband exposure containing the image,
        mask, and variance data.
    modelPsf : `numpy.ndarray`
        The 2D model of the PSF in the partially deconvolved space.
    footprint : `lsst.afw.detection.Footprint`
        The footprint of the parent to deblend.
    config : `ScarletDeblendConfig`
        Configuration of the deblending task.
    spectrumInit : `bool`
        Whether or not to initialize the model using the spectrum.
    wavelets : `numpy.ndarray`
        Pre-generated wavelets to use if using wavelet initialization.

    Returns
    -------
    blend : `scarlet.lite.LiteBlend`
        The blend that was fit, containing the deblended sources.
    skippedSources : `list[int]`
        Indices of sources that were skipped due to no flux.
        This usually means that a source was a spurious detection in one
        band that should not have been included in the merged catalog.
    skippedBands : `list[str]`
        Bands that were skipped because a PSF could not be generated for them.
    """
    from scarlet import lite
    # Extract coordinates from each MultiColorPeak
    bbox = footprint.getBBox()
    psfCenter = footprint.getCentroid()

    observation = buildOldObservation(
        modelPsf=modelPsf,
        psfCenter=psfCenter,
        mExposure=mExposure[:, bbox],
        footprint=footprint,
        badPixelMasks=config.badMask,
        useWeights=config.useWeights,
        convolutionType=config.convolutionType,
    )

    # Convert the centers to pixel coordinates
    xmin = bbox.getMinX()
    ymin = bbox.getMinY()
    centers = [
        np.array([peak.getIy() - ymin, peak.getIx() - xmin], dtype=int)
        for peak in footprint.peaks
        if not isPseudoSource(peak, config.pseudoColumns)
    ]

    # Initialize the sources
    if config.morphImage == "chi2":
        sources = lite.init_all_sources_main(
            observation,
            centers,
            min_snr=config.minSNR,
            thresh=config.morphThresh,
        )
    elif config.morphImage == "wavelet":
        _bbox = bboxToScarletBox(len(mExposure.filters), bbox, bbox.getMin())
        _wavelets = wavelets[(slice(None), *_bbox[1:].slices)]
        sources = lite.init_all_sources_wavelets(
            observation,
            centers,
            use_psf=False,
            wavelets=_wavelets,
            min_snr=config.minSNR,
        )
    else:
        raise ValueError("morphImage must be either 'chi2' or 'wavelet'.")

    # Set the optimizer
    if config.optimizer == "adaprox":
        parameterization = partial(
            lite.init_adaprox_component,
            bg_thresh=config.backgroundThresh,
            max_prox_iter=config.maxProxIter,
        )
    elif config.optimizer == "fista":
        parameterization = partial(
            lite.init_fista_component,
            bg_thresh=config.backgroundThresh,
        )
    else:
        raise ValueError("Unrecognized optimizer. Must be either 'adaprox' or 'fista'.")
    sources = lite.parameterize_sources(sources, observation, parameterization)

    # Attach the peak to all of the initialized sources
    for k, center in enumerate(centers):
        # This is just to make sure that there isn't a coding bug
        if len(sources[k].components) > 0 and np.any(sources[k].center != center):
            raise ValueError(f"Misaligned center, expected {center} but got {sources[k].center}")
        # Store the record for the peak with the appropriate source
        sources[k].detectedPeak = footprint.peaks[k]

    blend = lite.LiteBlend(sources, observation)

    # Initialize each source with its best fit spectrum
    if spectrumInit:
        blend.fit_spectra()

    # Record the indices of the sources that could not be initialized
    # and were skipped
    skippedSources = [k for k, src in enumerate(sources) if src.is_null]

    blend.fit(
        max_iter=config.maxIter,
        e_rel=config.relativeError,
        min_iter=config.minIter,
        reweight=False,
    )

    # Store the location of the PSF center for persistence
    blend.psfCenter = (psfCenter.x, psfCenter.y)

    # Calculate the bands that were skipped
    skippedBands = [band for band in mExposure.filters if band not in observation.bands]

    return blend, skippedSources, skippedBands

class ScarletDeblendConfig(pexConfig.Config):
    """MultibandDeblendConfig

    Configuration for the multiband deblender.
    The parameters are organized by the parameter types, which are
    - Stopping Criteria: Used to determine if the fit has converged
    - Position Fitting Criteria: Used to fit the positions of the peaks
    - Constraints: Used to apply constraints to the peaks and their components
    - Other: Parameters that don't fit into the above categories
    """
    # Stopping Criteria
    minIter = pexConfig.Field(dtype=int, default=15,
                              doc="Minimum number of iterations before the optimizer is allowed to stop.")
    maxIter = pexConfig.Field(dtype=int, default=300,
                              doc="Maximum number of iterations to deblend a single parent")
    relativeError = pexConfig.Field(dtype=float, default=1e-2,
                                    doc=("Change in the loss function between iterations to exit fitter. "
                                         "Typically this is `1e-2` if measurements will be made on the "
                                         "flux re-distributed models and `1e-4` when making measurements "
                                         "on the models themselves."))

    # Constraints
    morphThresh = pexConfig.Field(dtype=float, default=1,
                                  doc="Fraction of background RMS a pixel must have "
                                      "to be included in the initial morphology")
    # Lite Parameters
    # All of these parameters (except version) are only valid if version='lite'
    version = pexConfig.ChoiceField(
        dtype=str,
        default="lite",
        allowed={
            "old_lite": "scarlet lite from the scarlet main package",
            "lite": "LSST optimized version of scarlet for survey data from a single instrument",
        },
        doc="The version of scarlet to use.",
    )
    optimizer = pexConfig.ChoiceField(
        dtype=str,
        default="adaprox",
        allowed={
            "adaprox": "Proximal ADAM optimization",
            "fista": "Accelerated proximal gradient method",
        },
        doc="The optimizer to use for fitting parameters; only used when version='lite'",
    )
    morphImage = pexConfig.ChoiceField(
        dtype=str,
        default="chi2",
        allowed={
            "chi2": "Initialize sources on a chi^2 image made from all available bands",
            "wavelet": "Initialize sources using a wavelet decomposition of the chi^2 image",
        },
        doc="The type of image to use for initializing the morphology. "
            "Must be either 'chi2' or 'wavelet'."
    )
    backgroundThresh = pexConfig.Field(
        dtype=float,
        default=0.25,
        doc="Fraction of background to use for a sparsity threshold. "
            "This prevents sources from growing unrealistically outside "
            "the parent footprint while still modeling flux correctly "
            "for bright sources."
    )
    maxProxIter = pexConfig.Field(
        dtype=int,
        default=1,
        doc="Maximum number of proximal operator iterations inside of each "
            "iteration of the optimizer. "
            "This config field is only used if version='lite' and optimizer='adaprox'."
    )
    waveletScales = pexConfig.Field(
        dtype=int,
        default=5,
        doc="Number of wavelet scales to use for wavelet initialization. "
            "This field is only used when `version`='lite' and `morphImage`='wavelet'."
    )

    # Other scarlet parameters
    useWeights = pexConfig.Field(
        dtype=bool, default=True,
        doc=("Whether or not to use inverse variance weighting. "
             "If `useWeights` is `False` then flat weights are used"))
    modelPsfSize = pexConfig.Field(
        dtype=int, default=11,
        doc="Model PSF side length in pixels")
    modelPsfSigma = pexConfig.Field(
        dtype=float, default=0.8,
        doc="Define sigma for the model frame PSF")
    minSNR = pexConfig.Field(
        dtype=float, default=50,
        doc="Minimum signal to noise to accept the source. "
            "Sources with lower flux will be initialized with the PSF but updated "
            "like an ordinary ExtendedSource (known in scarlet as a `CompactSource`).")
    saveTemplates = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to save the SEDs and templates")
    processSingles = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to process isolated sources in the deblender")
    convolutionType = pexConfig.Field(
        dtype=str, default="fft",
        doc="Type of convolution to render the model to the observations.\n"
            "- 'fft': perform convolutions in Fourier space\n"
            "- 'real': perform convolutions in real space.")
    sourceModel = pexConfig.Field(
        dtype=str, default="double",
        doc=("How to determine which model to use for sources, from\n"
             "- 'single': use a single component for all sources\n"
             "- 'double': use a bulge disk model for all sources\n"
             "- 'compact': use a single component model, initialized with a point source morphology, "
             " for all sources\n"
             "- 'point': use a point-source model for all sources\n"
             "- 'fit': use a PSF fitting model to determine the number of components "
             "(not yet implemented)"),
        deprecated="This field will be deprecated when the default for `version` is changed to `lite`.",
    )
    setSpectra = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to solve for the best-fit spectra during initialization. "
            "This makes initialization slightly longer, as it requires a convolution "
            "to set the optimal spectra, but results in a much better initial log-likelihood "
            "and reduced total runtime, with convergence in fewer iterations. "
            "Only blends with `nPeaks * area < maxSpectrumCutoff` will use the "
            "improved initialization.")

    # Mask-plane restrictions
    badMask = pexConfig.ListField(
        dtype=str, default=defaultBadPixelMasks,
        doc="The mask planes used to reject bad pixels during the fit")
    statsMask = pexConfig.ListField(dtype=str, default=["SAT", "INTRP", "NO_DATA"],
                                    doc="Mask planes to ignore when performing statistics")
    maskLimits = pexConfig.DictField(
        keytype=str,
        itemtype=float,
        default={},
        doc=("Mask planes with the corresponding limit on the fraction of masked pixels. "
             "Sources violating this limit will not be deblended. "
             "If the fraction is `0` then the limit is a single pixel."),
    )

    # Size restrictions
    maxNumberOfPeaks = pexConfig.Field(
        dtype=int, default=200,
        doc=("Only deblend the brightest maxNumberOfPeaks peaks in the parent"
             " (<= 0: unlimited)"))
    maxFootprintArea = pexConfig.Field(
        dtype=int, default=100_000,
        doc=("Maximum area for footprints before they are ignored as large; "
             "non-positive means no threshold applied"))
    maxAreaTimesPeaks = pexConfig.Field(
        dtype=int, default=10_000_000,
        doc=("Maximum rectangular footprint area * nPeaks in the footprint. "
             "This was introduced in DM-33690 to prevent fields that are crowded or have a "
             "LSB galaxy that causes memory intensive initialization in scarlet from dominating "
             "the overall runtime and/or causing the task to run out of memory. "
             "(<= 0: unlimited)")
    )
    maxFootprintSize = pexConfig.Field(
        dtype=int, default=0,
        doc=("Maximum linear dimension for footprints before they are ignored "
             "as large; non-positive means no threshold applied"))
    minFootprintAxisRatio = pexConfig.Field(
        dtype=float, default=0.0,
        doc=("Minimum axis ratio for footprints before they are ignored "
             "as large; non-positive means no threshold applied"))
    maxSpectrumCutoff = pexConfig.Field(
        dtype=int, default=1_000_000,
        doc=("Maximum number of pixels * number of sources in a blend. "
             "This is different than `maxFootprintArea` because this isn't "
             "the footprint area but the area of the bounding box that "
             "contains the footprint, and is also multiplied by the number of "
             "sources in the footprint. This prevents large skinny blends with "
             "a high density of sources from running out of memory. "
             "If `maxSpectrumCutoff == -1` then there is no cutoff.")
    )
    # Failure modes
    fallback = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to fallback to a smaller number of components if a source does not initialize"
    )
    notDeblendedMask = pexConfig.Field(
        dtype=str, default="NOT_DEBLENDED", optional=True,
        doc="Mask name for footprints not deblended, or None")
    catchFailures = pexConfig.Field(
        dtype=bool, default=True,
        doc=("If True, catch exceptions thrown by the deblender, log them, "
             "and set a flag on the parent, instead of letting them propagate up"))

    # Other options
    columnInheritance = pexConfig.DictField(
        keytype=str, itemtype=str, default={
            "deblend_nChild": "deblend_parentNChild",
            "deblend_nPeaks": "deblend_parentNPeaks",
            "deblend_spectrumInitFlag": "deblend_spectrumInitFlag",
            "deblend_blendConvergenceFailedFlag": "deblend_blendConvergenceFailedFlag",
        },
        doc="Columns to pass from the parent to the child. "
            "The key is the name of the column for the parent record, "
            "the value is the name of the column to use for the child."
    )
    pseudoColumns = pexConfig.ListField(
        dtype=str, default=['merge_peak_sky', 'sky_source'],
        doc="Names of flags which should never be deblended."
    )

    # Testing options
    # Some obs packages and ci packages run the full pipeline on a small
    # subset of data to test that the pipeline is functioning properly.
    # This is not meant as scientific validation, so it can be useful
    # to only run on a small subset of the data that is large enough to
    # test the desired pipeline features but not so long that the deblender
    # is the tall pole in terms of execution times.
    useCiLimits = pexConfig.Field(
        dtype=bool, default=False,
        doc="Limit the number of sources deblended for CI to prevent long build times")
    ciDeblendChildRange = pexConfig.ListField(
        dtype=int, default=[5, 10],
        doc="Only deblend parent Footprints with a number of peaks in the (inclusive) range indicated. "
            "If `useCiLimits==False` then this parameter is ignored.")
    ciNumParentsToDeblend = pexConfig.Field(
        dtype=int, default=10,
        doc="Only use the first `ciNumParentsToDeblend` parent footprints with a total peak count "
            "within `ciDeblendChildRange`. "
            "If `useCiLimits==False` then this parameter is ignored.")

class ScarletDeblendTask(pipeBase.Task):
    """ScarletDeblendTask

    Split blended sources into individual sources.

    This task modifies the input SourceCatalog in-place and returns it,
    along with the persistable scarlet model data, from `run`.
    """
    ConfigClass = ScarletDeblendConfig
    _DefaultName = "scarletDeblend"

    def __init__(self, schema, peakSchema=None, **kwargs):
        """Create the task, adding necessary fields to the given schema.

        Parameters
        ----------
        schema : `lsst.afw.table.schema.schema.Schema`
            Schema object for measurement fields; will be modified in-place.
        peakSchema : `lsst.afw.table.schema.schema.Schema`
            Schema of Footprint Peaks that will be passed to the deblender.
            Any fields beyond the PeakTable minimal schema will be transferred
            to the main source Schema. If None, no fields will be transferred
            from the Peaks.
        **kwargs
            Passed to Task.__init__.
        """
        pipeBase.Task.__init__(self, **kwargs)

        peakMinimalSchema = afwDet.PeakTable.makeMinimalSchema()
        if peakSchema is None:
            # In this case, the peakSchemaMapper will transfer nothing, but
            # we'll still have one to simplify downstream code
            self.peakSchemaMapper = afwTable.SchemaMapper(peakMinimalSchema, schema)
        else:
            self.peakSchemaMapper = afwTable.SchemaMapper(peakSchema, schema)
            for item in peakSchema:
                if item.key not in peakMinimalSchema:
                    self.peakSchemaMapper.addMapping(item.key, item.field)
                    # Because SchemaMapper makes a copy of the output schema
                    # you give its ctor, it isn't updating this Schema in
                    # place. That's probably a design flaw, but in the
                    # meantime, we'll keep that schema in sync with the
                    # peakSchemaMapper.getOutputSchema() manually, by adding
                    # the same fields to both.
                    schema.addField(item.field)
            assert schema == self.peakSchemaMapper.getOutputSchema(), "Logic bug mapping schemas"
        self._addSchemaKeys(schema)
        self.schema = schema
        self.toCopyFromParent = [item.key for item in self.schema
                                 if item.field.getName().startswith("merge_footprint")]

    def _addSchemaKeys(self, schema):
        """Add deblender specific keys to the schema
        """
        # Parent (blend) fields
        self.runtimeKey = schema.addField('deblend_runtime', type=np.float32, doc='runtime in ms')
        self.iterKey = schema.addField('deblend_iterations', type=np.int32, doc='iterations to converge')
        self.nChildKey = schema.addField('deblend_nChild', type=np.int32,
                                         doc='Number of children this object has (defaults to 0)')
        self.nPeaksKey = schema.addField("deblend_nPeaks", type=np.int32,
                                         doc="Number of initial peaks in the blend. "
                                             "This includes peaks that may have been culled "
                                             "during deblending or failed to deblend")
        # Skipped flags
        self.deblendSkippedKey = schema.addField('deblend_skipped', type='Flag',
                                                 doc="Deblender skipped this source")
        self.isolatedParentKey = schema.addField('deblend_isolatedParent', type='Flag',
                                                 doc='The source has only a single peak '
                                                     'and was not deblended')
        self.pseudoKey = schema.addField('deblend_isPseudo', type='Flag',
                                         doc='The source is identified as a "pseudo" source and '
                                             'was not deblended')
        self.tooManyPeaksKey = schema.addField('deblend_tooManyPeaks', type='Flag',
                                               doc='Source had too many peaks; '
                                                   'only the brightest were included')
        self.tooBigKey = schema.addField('deblend_parentTooBig', type='Flag',
                                         doc='Parent footprint covered too many pixels')
        self.maskedKey = schema.addField('deblend_masked', type='Flag',
                                         doc='Parent footprint had too many masked pixels')
        # Convergence flags
        self.sedNotConvergedKey = schema.addField('deblend_sedConvergenceFailed', type='Flag',
                                                  doc='scarlet sed optimization did not converge before '
                                                      'config.maxIter')
        self.morphNotConvergedKey = schema.addField('deblend_morphConvergenceFailed', type='Flag',
                                                    doc='scarlet morph optimization did not converge before '
                                                        'config.maxIter')
        self.blendConvergenceFailedFlagKey = schema.addField('deblend_blendConvergenceFailedFlag',
                                                             type='Flag',
                                                             doc='at least one source in the blend '
                                                                 'failed to converge')
        # Error flags
        self.deblendFailedKey = schema.addField('deblend_failed', type='Flag',
                                                doc="Deblending failed on source")
        self.deblendErrorKey = schema.addField('deblend_error', type="String", size=25,
                                               doc='Name of error if the blend failed')
        self.incompleteDataKey = schema.addField('deblend_incompleteData', type='Flag',
                                                 doc='True when a blend has at least one band '
                                                     'that could not generate a PSF and was '
                                                     'not included in the model.')
        # Deblended source fields
        self.peakCenter = afwTable.Point2IKey.addFields(schema, name="deblend_peak_center",
                                                        doc="Center used to apply constraints in scarlet",
                                                        unit="pixel")
        self.peakIdKey = schema.addField("deblend_peakId", type=np.int32,
                                         doc="ID of the peak in the parent footprint. "
                                             "This is not unique, but the combination of 'parent' "
                                             "and 'peakId' should be for all child sources. "
                                             "Top level blends with no parents have 'peakId=0'")
        self.modelCenterFlux = schema.addField('deblend_peak_instFlux', type=float, units='count',
                                               doc="The instFlux at the peak position of the "
                                                   "deblended model")
        self.modelTypeKey = schema.addField("deblend_modelType", type="String", size=25,
                                            doc="The type of model used, for example "
                                                "MultiExtendedSource, SingleExtendedSource, PointSource")
        self.parentNPeaksKey = schema.addField("deblend_parentNPeaks", type=np.int32,
                                               doc="deblend_nPeaks from this record's parent.")
        self.parentNChildKey = schema.addField("deblend_parentNChild", type=np.int32,
                                               doc="deblend_nChild from this record's parent.")
        self.scarletFluxKey = schema.addField("deblend_scarletFlux", type=np.float32,
                                              doc="Flux measurement from scarlet")
        self.scarletLogLKey = schema.addField("deblend_logL", type=np.float32,
                                              doc="Final logL, used to identify regressions in scarlet.")
        self.edgePixelsKey = schema.addField('deblend_edgePixels', type='Flag',
                                             doc='Source had flux on the edge of the parent footprint')
        self.scarletSpectrumInitKey = schema.addField("deblend_spectrumInitFlag", type='Flag',
                                                      doc="True when scarlet initializes sources "
                                                          "in the blend with a more accurate spectrum. "
                                                          "The algorithm uses a lot of memory, "
                                                          "so large dense blends will use "
                                                          "a less accurate initialization.")
        self.nComponentsKey = schema.addField("deblend_nComponents", type=np.int32,
                                              doc="Number of components in a ScarletLiteSource. "
                                                  "If `config.version != 'lite'` then "
                                                  "this column is set to zero.")
        self.psfKey = schema.addField('deblend_deblendedAsPsf', type='Flag',
                                      doc='Deblender thought this source looked like a PSF')
        self.coverageKey = schema.addField('deblend_dataCoverage', type=np.float32,
                                           doc='Fraction of pixels with data. '
                                               'In other words, 1 - fraction of pixels with NO_DATA set.')
        self.zeroFluxKey = schema.addField("deblend_zeroFlux", type="Flag",
                                           doc="Source has zero flux.")
        # Blendedness/classification metrics
        self.maxOverlapKey = schema.addField("deblend_maxOverlap", type=np.float32,
                                             doc="Maximum overlap with all of the other neighbors flux "
                                                 "combined. "
                                                 "This is useful as a metric for determining how blended a "
                                                 "source is because if it only overlaps with other sources "
                                                 "at or below the noise level, it is likely to be a mostly "
                                                 "isolated source in the deconvolved model frame.")
        self.fluxOverlapKey = schema.addField("deblend_fluxOverlap", type=np.float32,
                                              doc="This is the total flux from neighboring objects that "
                                                  "overlaps with this source.")
        self.fluxOverlapFractionKey = schema.addField("deblend_fluxOverlapFraction", type=np.float32,
                                                      doc="This is the fraction of "
                                                          "`flux from neighbors/source flux` "
                                                          "for a given source within the source's "
                                                          "footprint.")
        self.blendednessKey = schema.addField("deblend_blendedness", type=np.float32,
                                              doc="The Bosch et al. 2018 metric for 'blendedness.'")
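    # A sketch (not part of the task) of inspecting the columns registered
    # above on an already-deblended catalog; `catalog` is assumed to be a
    # contiguous `SourceCatalog` produced by this task:
    #
    #     >>> skipped = catalog["deblend_skipped"]
    #     >>> nBlends = np.sum(catalog["deblend_nChild"] > 0)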

    @timeMethod
    def run(self, mExposure, mergedSources):
        """Get the psf from each exposure and then run deblend().

        Parameters
        ----------
        mExposure : `MultibandExposure`
            The exposures should be co-added images of the same
            shape and region of the sky.
        mergedSources : `SourceCatalog`
            The merged `SourceCatalog` that contains parent footprints
            to (potentially) deblend.

        Returns
        -------
        catalog : `SourceCatalog`
            The input catalog with the deblended children appended,
            as returned by `deblend`.
        modelData : `lsst.scarlet.lite.io.ScarletModelData`
            The persistable data model for the scarlet models.
        """
        return self.deblend(mExposure, mergedSources)
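    # A minimal sketch of driving the task; `mExposure` (a MultibandExposure)
    # and `catalog` (the merged detection catalog, built against `schema` so
    # that it carries the columns added in `__init__`) are assumed to be
    # produced upstream by detection and merging:
    #
    #     >>> schema = afwTable.SourceTable.makeMinimalSchema()
    #     >>> task = ScarletDeblendTask(schema=schema)
    #     >>> catalog, modelData = task.run(mExposure, mergedSources=catalog)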

    @timeMethod
    def deblend(self, mExposure, catalog):
        """Deblend a data cube of multiband images

        Parameters
        ----------
        mExposure : `MultibandExposure`
            The exposures should be co-added images of the same
            shape and region of the sky.
        catalog : `SourceCatalog`
            The merged `SourceCatalog` that contains parent footprints
            to (potentially) deblend. The new deblended sources are
            appended to this catalog in place.

        Returns
        -------
        catalog : `SourceCatalog`
            The input catalog with the deblended children appended.
        modelData : `lsst.scarlet.lite.io.ScarletModelData`
            The persistable data model for the scarlet models,
            keyed by parent id.
        """
        import time

        # Cull footprints if required by ci
        if self.config.useCiLimits:
            self.log.info("Using CI catalog limits, the original number of sources to deblend was %d.",
                          len(catalog))
            # Select parents with a number of children in the range
            # config.ciDeblendChildRange
            minChildren, maxChildren = self.config.ciDeblendChildRange
            nPeaks = np.array([len(src.getFootprint().peaks) for src in catalog])
            childrenInRange = np.where((nPeaks >= minChildren) & (nPeaks <= maxChildren))[0]
            if len(childrenInRange) < self.config.ciNumParentsToDeblend:
                raise ValueError("Fewer than ciNumParentsToDeblend children were contained in the range "
                                 "indicated by ciDeblendChildRange. Adjust this range to include more "
                                 "parents.")
            # Keep all of the isolated parents and the first
            # `ciNumParentsToDeblend` children
            parents = nPeaks == 1
            children = np.zeros((len(catalog),), dtype=bool)
            children[childrenInRange[:self.config.ciNumParentsToDeblend]] = True
            catalog = catalog[parents | children]
            # We need to update the IdFactory, otherwise the source ids
            # will not be sequential
            idFactory = catalog.getIdFactory()
            maxId = np.max(catalog["id"])
            idFactory.notify(maxId)

        self.log.info("Deblending %d sources in %d exposure bands", len(catalog), len(mExposure))
        periodicLog = PeriodicLogger(self.log)

        # Create a set of wavelet coefficients if using wavelet initialization
        if self.config.morphImage == "wavelet":
            images = mExposure.image.array
            variance = mExposure.variance.array
            wavelets = scl.detect.get_detect_wavelets(images, variance, scales=self.config.waveletScales)
        else:
            wavelets = None

        # Add the NOT_DEBLENDED mask to the mask plane in each band
        if self.config.notDeblendedMask:
            for mask in mExposure.mask:
                mask.addMaskPlane(self.config.notDeblendedMask)

        # Initialize the persistable data model
        modelPsf = scl.utils.integrated_circular_gaussian(sigma=self.config.modelPsfSigma)
        dataModel = scl.io.ScarletModelData(modelPsf)

        # Initialize the monotonicity operator with a size of 101 x 101 pixels.
        # Note: If a component is > 101x101 in either axis then the
        # monotonicity operator will resize itself.
        monotonicity = scl.operators.Monotonicity((101, 101))

        nParents = len(catalog)
        nDeblendedParents = 0
        skippedParents = []
        for parentIndex in range(nParents):
            parent = catalog[parentIndex]
            foot = parent.getFootprint()
            bbox = foot.getBBox()
            peaks = foot.getPeaks()

            # Since we use the first peak for the parent object, we should
            # propagate its flags to the parent source.
            parent.assign(peaks[0], self.peakSchemaMapper)

            # Block of conditions for skipping a parent with multiple children
            if (skipArgs := self._checkSkipped(parent, mExposure)) is not None:
                self._skipParent(parent, *skipArgs)
                skippedParents.append(parentIndex)
                continue

            nDeblendedParents += 1
            self.log.trace("Parent %d: deblending %d peaks", parent.getId(), len(peaks))
            # Run the deblender
            blendError = None

            # Choose whether or not to use improved spectral initialization.
            # This significantly cuts down on the number of iterations
            # that the optimizer needs and usually results in a better
            # fit.
            # But using least squares on a very large blend causes memory
            # issues, so it is not done for large blends
            if self.config.setSpectra:
                if self.config.maxSpectrumCutoff <= 0:
                    spectrumInit = True
                else:
                    spectrumInit = len(foot.peaks) * bbox.getArea() < self.config.maxSpectrumCutoff
            else:
                spectrumInit = False

            try:
                t0 = time.monotonic()
                # Build the parameter lists with the same ordering
                if self.config.version == "lite":
                    blend, skippedSources, skippedBands = deblend(
                        mExposure=mExposure,
                        modelPsf=modelPsf,
                        footprint=foot,
                        config=self.config,
                        spectrumInit=spectrumInit,
                        wavelets=wavelets,
                        monotonicity=monotonicity,
                    )
                elif self.config.version == "old_lite":
                    blend, skippedSources, skippedBands = deblend_old_lite(
                        mExposure=mExposure,
                        modelPsf=modelPsf,
                        footprint=foot,
                        config=self.config,
                        spectrumInit=spectrumInit,
                        wavelets=wavelets,
                    )
                tf = time.monotonic()
                runtime = (tf-t0)*1000
                converged = _checkBlendConvergence(blend, self.config.relativeError)
                # Store the number of components in the blend
                nComponents = len(blend.components)
                nChild = len(blend.sources)
                parent.set(self.incompleteDataKey, len(skippedBands) > 0)
            # Catch all errors and filter out the ones that we know about
            except Exception as e:
                blendError = type(e).__name__
                if isinstance(e, ScarletGradientError):
                    parent.set(self.iterKey, e.iterations)
                else:
                    blendError = "UnknownError"
                    if self.config.catchFailures:
                        # Make it easy to find UnknownErrors in the log file
                        self.log.warning("UnknownError")
                        import traceback
                        traceback.print_exc()
                    else:
                        raise

                self._skipParent(
                    parent=parent,
                    skipKey=self.deblendFailedKey,
                    logMessage=f"Unable to deblend source {parent.getId()}: {blendError}",
                )
                parent.set(self.deblendErrorKey, blendError)
                skippedParents.append(parentIndex)
                continue

            # Update the parent record with the deblending results
            self._updateParentRecord(
                parent=parent,
                nPeaks=len(peaks),
                nChild=nChild,
                nComponents=nComponents,
                runtime=runtime,
                iterations=len(blend.loss),
                logL=blend.loss[-1],
                spectrumInit=spectrumInit,
                converged=converged,
            )

            # Add each deblended source to the catalog
            for k, scarletSource in enumerate(blend.sources):
                # Skip any sources with no flux or that scarlet skipped because
                # it could not initialize
                if k in skippedSources or (self.config.version == "lite" and scarletSource.is_null):
                    # No need to propagate anything
                    continue
                parent.set(self.deblendSkippedKey, False)

                # Add all fields except the HeavyFootprint to the
                # source record
                sourceRecord = self._addChild(
                    parent=parent,
                    peak=scarletSource.detectedPeak,
                    catalog=catalog,
                    scarletSource=scarletSource,
                )
                scarletSource.record_id = sourceRecord.getId()
                scarletSource.peak_id = scarletSource.detectedPeak.getId()

            # Store the blend information so that it can be persisted
            if self.config.version == "lite":
                blendData = scl.io.ScarletBlendData.from_blend(blend, blend.psfCenter)
            else:
                blendData = io.oldScarletToData(blend, blend.psfCenter, bbox.getMin())
            dataModel.blends[parent.getId()] = blendData

            # Log a message if it has been a while since the last log.
            periodicLog.log("Deblended %d parent sources out of %d", parentIndex + 1, nParents)

        # Clear the cached values in scarlet to clear out memory
        if self.config.version == "old_lite":
            import scarlet
            scarlet.cache.Cache._cache = {}

        # Update the mExposure mask with the footprint of skipped parents
        if self.config.notDeblendedMask:
            for mask in mExposure.mask:
                for parentIndex in skippedParents:
                    fp = catalog[parentIndex].getFootprint()
                    fp.spans.setMask(mask, mask.getPlaneBitMask(self.config.notDeblendedMask))

        self.log.info("Deblender results: of %d parent sources, %d were deblended, "
                      "creating %d children, for a total of %d sources",
                      nParents, nDeblendedParents, len(catalog)-nParents, len(catalog))
        return catalog, dataModel

    def _isLargeFootprint(self, footprint):
        """Returns whether a Footprint is large

        'Large' is defined by thresholds on the area, size and axis ratio,
        and total area of the bounding box multiplied by
        the number of children.
        These may be disabled independently by configuring them to be
        non-positive.
        """
        if self.config.maxFootprintArea > 0 and footprint.getArea() > self.config.maxFootprintArea:
            return True
        if self.config.maxFootprintSize > 0:
            bbox = footprint.getBBox()
            if max(bbox.getWidth(), bbox.getHeight()) > self.config.maxFootprintSize:
                return True
        if self.config.minFootprintAxisRatio > 0:
            axes = afwEll.Axes(footprint.getShape())
            if axes.getB() < self.config.minFootprintAxisRatio*axes.getA():
                return True
        if self.config.maxAreaTimesPeaks > 0:
            if footprint.getBBox().getArea() * len(footprint.peaks) > self.config.maxAreaTimesPeaks:
                return True
        return False

    def _isMasked(self, footprint, mExposure):
        """Returns whether the footprint violates the mask limits

        Parameters
        ----------
        footprint : `lsst.afw.detection.Footprint`
            The footprint to check for masked pixels
        mExposure : `MultibandExposure`
            The multiband exposure whose mask planes are checked for
            masked pixels in the `footprint`.

        Returns
        -------
        isMasked : `bool`
            `True` if the fraction of masked pixels in the footprint
            exceeds the limit for any of the mask planes in
            `self.config.maskLimits`.
        """
        bbox = footprint.getBBox()
        mask = np.bitwise_or.reduce(mExposure.mask[:, bbox].array, axis=0)
        size = float(footprint.getArea())
        for maskName, limit in self.config.maskLimits.items():
            maskVal = mExposure.mask.getPlaneBitMask(maskName)
            _mask = afwImage.MaskX(mask & maskVal, xy0=bbox.getMin())
            # spanset of masked pixels
            maskedSpan = footprint.spans.intersect(_mask, maskVal)
            if (maskedSpan.getArea())/size > limit:
                return True
        return False
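    # A toy numpy sketch (standing in for the afw mask/spanset machinery) of
    # the fraction test above: with a limit of 0.25 on a plane, a footprint
    # in which 30% of the pixels carry that plane's bit is rejected.
    #
    #     >>> mask = np.zeros(100, dtype=np.int32)
    #     >>> mask[:30] |= 1  # bit 0 stands in for e.g. "NO_DATA"
    #     >>> bool(np.sum((mask & 1) > 0) / mask.size > 0.25)
    #     True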

    def _skipParent(self, parent, skipKey, logMessage):
        """Update a parent record that is not being deblended.

        This is a fairly trivial function but is implemented to ensure
        that a skipped parent updates the appropriate columns
        consistently, and always has a flag to mark the reason that
        it is being skipped.

        Parameters
        ----------
        parent : `lsst.afw.table.source.source.SourceRecord`
            The parent record to flag as skipped.
        skipKey : `lsst.afw.table.Key`
            The schema key of the flag that marks the reason for skipping.
        logMessage : `str`
            The message to display in a log.trace when a source
            is skipped.
        """
        if logMessage is not None:
            self.log.trace(logMessage)
        self._updateParentRecord(
            parent=parent,
            nPeaks=len(parent.getFootprint().peaks),
            nChild=0,
            nComponents=0,
            runtime=np.nan,
            iterations=0,
            logL=np.nan,
            spectrumInit=False,
            converged=False,
        )

        # Mark the source as skipped by the deblender and
        # flag the reason why.
        parent.set(self.deblendSkippedKey, True)
        parent.set(skipKey, True)

    def _checkSkipped(self, parent, mExposure):
        """Check whether a parent record should be skipped by the deblender.

        Parameters
        ----------
        parent : `lsst.afw.table.source.source.SourceRecord`
            The parent record to check.
        mExposure : `MultibandExposure`
            The exposures should be co-added images of the same
            shape and region of the sky.

        Returns
        -------
        skipArgs : `tuple` of (`lsst.afw.table.Key`, `str`) or `None`
            The flag key and log message to pass to `_skipParent` if the
            parent should be skipped, otherwise `None`.
        """
        skipKey = None
        skipMessage = None
        footprint = parent.getFootprint()
        if len(footprint.peaks) < 2 and not self.config.processSingles:
            # Skip isolated sources unless processSingles is turned on.
            # Note: this does not flag isolated sources as skipped or
            # set the NOT_DEBLENDED mask in the exposure,
            # since these aren't really skipped blends.
            skipKey = self.isolatedParentKey
        elif isPseudoSource(parent, self.config.pseudoColumns):
            # We also skip pseudo sources, like sky objects, which
            # are intended to be skipped.
            skipKey = self.pseudoKey
        if self._isLargeFootprint(footprint):
            # The footprint is above the maximum footprint size limit
            skipKey = self.tooBigKey
            skipMessage = f"Parent {parent.getId()}: skipping large footprint"
        elif self._isMasked(footprint, mExposure):
            # The footprint exceeds the maximum number of masked pixels
            skipKey = self.maskedKey
            skipMessage = f"Parent {parent.getId()}: skipping masked footprint"
        elif self.config.maxNumberOfPeaks > 0 and len(footprint.peaks) > self.config.maxNumberOfPeaks:
            # Unlike meas_deblender, in scarlet we skip the entire blend
            # if the number of peaks exceeds max peaks, since neglecting
            # to model any peaks often results in catastrophic failure
            # of scarlet to generate models for the brighter sources.
            skipKey = self.tooManyPeaksKey
            skipMessage = f"Parent {parent.getId()}: skipping blend with too many peaks"
        if skipKey is not None:
            return (skipKey, skipMessage)
        return None

    def setSkipFlags(self, mExposure, catalog):
        """Set the skip flags for all of the parent sources

        This is mostly used for testing which parent sources will be deblended
        and which will be skipped based on the current configuration options.
        Skipped sources will have the appropriate flags set in place in the
        catalog.

        Parameters
        ----------
        mExposure : `MultibandExposure`
            The exposures should be co-added images of the same
            shape and region of the sky.
        catalog : `SourceCatalog`
            The merged `SourceCatalog` that contains parent footprints
            to (potentially) deblend. The flags are set on the records
            in place.
        """
        for src in catalog:
            if (skipArgs := self._checkSkipped(src, mExposure)) is not None:
                self._skipParent(src, *skipArgs)

    def _updateParentRecord(self, parent, nPeaks, nChild, nComponents,
                            runtime, iterations, logL, spectrumInit, converged):
        """Update a parent record in all of the single band catalogs.

        Ensure that all locations that update a parent record,
        whether it is skipped or updated after deblending,
        update all of the appropriate columns.

        Parameters
        ----------
        parent : `lsst.afw.table.source.source.SourceRecord`
            The parent record to update.
        nPeaks : `int`
            Number of peaks in the parent footprint.
        nChild : `int`
            Number of children deblended from the parent.
            This may differ from `nPeaks` if some of the peaks
            were culled and have no deblended model.
        nComponents : `int`
            Total number of components in the parent.
            This is usually different than the number of children,
            since it is common for a single source to have multiple
            components.
        runtime : `float`
            Total runtime for deblending.
        iterations : `int`
            Total number of iterations in scarlet before convergence.
        logL : `float`
            Final log likelihood of the blend.
        spectrumInit : `bool`
            True when scarlet used `set_spectra` to initialize all
            sources with better initial intensities.
        converged : `bool`
            True when the optimizer reached convergence before
            reaching the maximum number of iterations.
        """
        parent.set(self.nPeaksKey, nPeaks)
        parent.set(self.nChildKey, nChild)
        parent.set(self.nComponentsKey, nComponents)
        parent.set(self.runtimeKey, runtime)
        parent.set(self.iterKey, iterations)
        parent.set(self.scarletLogLKey, logL)
        parent.set(self.scarletSpectrumInitKey, spectrumInit)
        # The flag marks a convergence *failure*, so invert `converged`
        parent.set(self.blendConvergenceFailedFlagKey, not converged)

    def _addChild(self, parent, peak, catalog, scarletSource):
        """Add a child to a catalog.

        This creates a new child in the source catalog,
        assigning it a parent id, and adding all columns
        that are independent across all filter bands.

        Parameters
        ----------
        parent : `lsst.afw.table.source.source.SourceRecord`
            The parent of the new child record.
        peak : `lsst.afw.table.PeakRecord`
            The peak record for the peak from the parent peak catalog.
        catalog : `lsst.afw.table.source.source.SourceCatalog`
            The merged `SourceCatalog` that contains parent footprints
            to (potentially) deblend.
        scarletSource : `scarlet.Component`
            The scarlet model for the new source record.

        Returns
        -------
        src : `lsst.afw.table.source.source.SourceRecord`
            The new child source record.
        """
        src = catalog.addNew()
        for key in self.toCopyFromParent:
            src.set(key, parent.get(key))
        # The peak catalog is the same for all bands,
        # so we just use the first peak catalog
        src.assign(peak, self.peakSchemaMapper)
        src.setParent(parent.getId())
        src.set(self.nPeaksKey, 1)
        # Set the psf key based on whether or not the source was
        # deblended using the PointSource model.
        # This key is not that useful anymore since we now keep track of
        # `modelType`, but we continue to propagate it in case code downstream
        # is expecting it.
        src.set(self.psfKey, scarletSource.__class__.__name__ == "PointSource")
        src.set(self.modelTypeKey, scarletSource.__class__.__name__)
        # We set the runtime to zero so that summing up the
        # runtime column will give the total time spent
        # running the deblender for the catalog.
        src.set(self.runtimeKey, 0)

        # Set the position of the peak from the parent footprint
        # This will make it easier to match the same source across
        # deblenders and across observations, where the peak
        # position is unlikely to change unless enough time passes
        # for a source to move on the sky.
        src.set(self.peakCenter, geom.Point2I(peak["i_x"], peak["i_y"]))
        src.set(self.peakIdKey, peak["id"])

        # Store the number of components for the source
        src.set(self.nComponentsKey, len(scarletSource.components))

        # Flag sources missing one or more bands
        src.set(self.incompleteDataKey, parent.get(self.incompleteDataKey))

        # Propagate columns from the parent to the child
        for parentColumn, childColumn in self.config.columnInheritance.items():
            src.set(childColumn, parent.get(parentColumn))

        return src