Coverage for python/lsst/meas/extensions/scarlet/scarletDeblendTask.py: 16%

392 statements  

« prev     ^ index     » next       coverage.py v7.4.1, created at 2024-01-30 11:54 +0000

1# This file is part of meas_extensions_scarlet. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (https://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the GNU General Public License 

20# along with this program. If not, see <https://www.gnu.org/licenses/>. 

21 

22from functools import partial 

23import logging 

24import numpy as np 

25 

26import lsst.pex.config as pexConfig 

27import lsst.pipe.base as pipeBase 

28import lsst.geom as geom 

29import lsst.afw.geom.ellipses as afwEll 

30import lsst.afw.image as afwImage 

31import lsst.afw.detection as afwDet 

32import lsst.afw.table as afwTable 

33import lsst.scarlet.lite as scl 

34from lsst.utils.logging import PeriodicLogger 

35from lsst.utils.timer import timeMethod 

36 

37from . import io 

38from .utils import bboxToScarletBox, defaultBadPixelMasks, buildObservation, computePsfKernelImage 

39 

# Scarlet and proxmin have a different definition of log levels than the stack,
# so even "warnings" occur far more often than we would like.
# So for now we only display scarlet and proxmin errors, as all other
# scarlet outputs would be considered "TRACE" by our standards.
scarletLogger = logging.getLogger("scarlet")
scarletLogger.setLevel(logging.ERROR)
proxminLogger = logging.getLogger("proxmin")
proxminLogger.setLevel(logging.ERROR)

# Public API of this module.
__all__ = ["deblend", "deblend_old_lite", "ScarletDeblendConfig", "ScarletDeblendTask"]

# Module-level logger for this task; uses the stack's log-level conventions.
logger = logging.getLogger(__name__)

52 

53 

class ScarletGradientError(Exception):
    """An error occurred during optimization

    This error occurs when the optimizer encounters
    a NaN value while calculating the gradient.

    Parameters
    ----------
    iterations : `int`
        The iteration at which the NaN gradient was encountered.
    sources : `list`
        The sources that contained NaN values.
    """
    def __init__(self, iterations, sources):
        self.iterations = iterations
        self.sources = sources
        # Bug fix: the message previously misspelled the class name as
        # "ScalarGradientError"; it now matches the actual class.
        self.message = (f"ScarletGradientError in iteration {iterations}. "
                        f"NaN values introduced in sources {sources}")

    def __str__(self):
        return self.message

69 

70 

71def _checkBlendConvergence(blend, f_rel): 

72 """Check whether or not a blend has converged 

73 """ 

74 deltaLoss = np.abs(blend.loss[-2] - blend.loss[-1]) 

75 convergence = f_rel * np.abs(blend.loss[-1]) 

76 return deltaLoss < convergence 

77 

78 

def isPseudoSource(source, pseudoColumns):
    """Check if a source is a pseudo source.

    This is mostly for skipping sky objects,
    but any other column can also be added to disable
    deblending on a parent or individual source when
    set to `True`.

    Parameters
    ----------
    source : `lsst.afw.table.source.source.SourceRecord`
        The source to check for the pseudo bit.
    pseudoColumns : `list` of `str`
        A list of columns to check for pseudo sources.
    """
    result = False
    for column in pseudoColumns:
        # Columns missing from the record are silently skipped.
        try:
            flag = source[column]
        except KeyError:
            continue
        result |= flag
    return result

101 

102 

def deblend(mExposure, modelPsf, footprint, config, spectrumInit, monotonicity, wavelets=None):
    """Deblend a parent footprint

    Parameters
    ----------
    mExposure : `lsst.image.MultibandExposure`
        The multiband exposure containing the image,
        mask, and variance data.
    modelPsf : `numpy.ndarray`
        The 2D model of the PSF in the partially deconvolved space.
    footprint : `lsst.detection.Footprint`
        The footprint of the parent to deblend.
    config : `ScarletDeblendConfig`
        Configuration of the deblending task.
    spectrumInit : `bool`
        Whether or not to initialize the model using the spectrum.
    monotonicity : `lsst.scarlet.lite.operators.Monotonicity`
        The monotonicity operator.
    wavelets : `numpy.ndarray`, optional
        Pre-generated wavelets to use if using wavelet initialization.

    Returns
    -------
    blend : `scarlet.lite.Blend`
        The blend this is to be deblended.
    skippedSources : `list`
        The source objects that were skipped due to no flux
        (note: despite older docs these are the sources themselves,
        not indices — see the `is_null` filter below).
        This usually means that a source was a spurious detection in one
        band that should not have been included in the merged catalog.
    skippedBands : `list[str]`
        Bands that were skipped because a PSF could not be generated for them.
    """
    # Extract coordinates from each MultiColorPeak
    bbox = footprint.getBBox()
    psfCenter = footprint.getCentroid()

    # Build the observation over the footprint's bounding box only.
    observation = buildObservation(
        modelPsf=modelPsf,
        psfCenter=psfCenter,
        mExposure=mExposure[:, bbox],
        footprint=footprint,
        badPixelMasks=config.badMask,
        useWeights=config.useWeights,
        convolutionType=config.convolutionType,
    )

    # Convert the peaks into an array of (y, x) positions.
    # NOTE(review): these are absolute pixel coordinates, unlike
    # `deblend_old_lite` which subtracts the bbox minimum — presumably
    # the new scarlet lite tracks the offset itself; confirm against
    # `buildObservation`.
    peaks = [
        np.array([peak.getIy(), peak.getIx()], dtype=int)
        for peak in footprint.peaks
        if not isPseudoSource(peak, config.pseudoColumns)
    ]

    # Initialize the sources
    if config.morphImage == "chi2":
        sources = scl.initialization.FactorizedChi2Initialization(
            observation=observation,
            centers=peaks,
            min_snr=config.minSNR,
            monotonicity=monotonicity,
            thresh=config.backgroundThresh,
        ).sources
    elif config.morphImage == "wavelet":
        # Slice the pre-generated full-image wavelets down to this
        # footprint's bounding box (band axis kept whole).
        _bbox = bboxToScarletBox(len(mExposure.filters), bbox, bbox.getMin())
        _wavelets = wavelets[(slice(None), *_bbox[1:].slices)]

        sources = scl.initialization.FactorizedWaveletInitialization(
            observation=observation,
            centers=peaks,
            use_psf=False,
            wavelets=_wavelets,
            monotonicity=monotonicity,
            min_snr=config.minSNR,
            thresh=config.backgroundThresh,
        ).sources
    else:
        raise ValueError("morphImage must be either 'chi2' or 'wavelet'.")

    blend = scl.Blend(sources, observation)

    # Initialize each source with its best fit spectrum
    if spectrumInit:
        blend.fit_spectra()

    # Set the optimizer
    if config.optimizer == "adaprox":
        blend.parameterize(partial(
            scl.component.default_adaprox_parameterization,
            noise_rms=observation.noise_rms/10,
        ))
    elif config.optimizer == "fista":
        blend.parameterize(scl.component.default_fista_parameterization)
    else:
        raise ValueError("Unrecognized optimizer. Must be either 'adaprox' or 'fista'.")

    blend.fit(
        max_iter=config.maxIter,
        e_rel=config.relativeError,
        min_iter=config.minIter,
    )

    # Attach the peak to all of the initialized sources
    for k, center in enumerate(peaks):
        # This is just to make sure that there isn't a coding bug
        if len(sources[k].components) > 0 and np.any(sources[k].center != center):
            raise ValueError(f"Misaligned center, expected {center} but got {sources[k].center}")
        # Store the record for the peak with the appropriate source
        sources[k].detectedPeak = footprint.peaks[k]

    # Set the sources that could not be initialized and were skipped
    skippedSources = [src for src in sources if src.is_null]

    # Store the location of the PSF center for storage
    blend.psfCenter = (psfCenter.x, psfCenter.y)

    # Calculate the bands that were skipped
    skippedBands = [band for band in mExposure.filters if band not in observation.bands]

    return blend, skippedSources, skippedBands

220 

221 

def buildOldObservation(
    modelPsf,
    psfCenter,
    mExposure,
    footprint=None,
    badPixelMasks=None,
    useWeights=True,
    convolutionType="real",
):
    """Generate a LiteObservation from a set of parameters.

    Make the generation and reconstruction of a scarlet model consistent
    by building a `LiteObservation` from a set of parameters.

    Parameters
    ----------
    modelPsf : `numpy.ndarray`
        The 2D model of the PSF in the partially deconvolved space.
    psfCenter : `tuple` or `Point2I` or `Point2D`
        The location `(x, y)` used as the center of the PSF.
    mExposure : `lsst.afw.image.multiband.MultibandExposure`
        The multi-band exposure that the model represents.
        If `mExposure` is `None` then no image, variance, or weights are
        attached to the observation.
    footprint : `lsst.afw.detection.Footprint`
        The footprint that is being fit.
        If `Footprint` is `None` then the weights are not updated to mask
        out pixels not contained in the footprint.
    badPixelMasks : `list` of `str`
        The keys from the bit mask plane used to mask out pixels
        during the fit.
        If `badPixelMasks` is `None` then the default values from
        `ScarletDeblendConfig.badMask` is used.
    useWeights : `bool`
        Whether or not fitting should use inverse variance weights to
        calculate the log-likelihood.
    convolutionType : `str`
        The type of convolution to use (either "real" or "fft").
        When reconstructing an image it is advised to use "real" to avoid
        polluting the footprint (the original note was truncated here —
        presumably FFT edge artifacts; confirm with the scarlet docs).

    Returns
    -------
    observation : `scarlet.lite.LiteObservation`
        The observation constructed from the input parameters.
    """
    from scarlet import lite
    # Initialize the observed PSFs
    psfModels, mExposure = computePsfKernelImage(mExposure, psfCenter)

    # Use the inverse variance as the weights
    if useWeights:
        weights = 1/mExposure.variance.array
    else:
        # Flat weights, with bad pixels zeroed out below.
        weights = np.ones_like(mExposure.image.array)
    # Mask out bad pixels
    if badPixelMasks is None:
        badPixelMasks = ScarletDeblendConfig().badMask
    badPixels = mExposure.mask.getPlaneBitMask(badPixelMasks)
    mask = mExposure.mask.array & badPixels
    weights[mask > 0] = 0

    if footprint is not None:
        # Mask out the pixels outside the footprint
        weights *= footprint.spans.asArray()

    observation = lite.LiteObservation(
        images=mExposure.image.array,
        variance=mExposure.variance.array,
        weights=weights,
        psfs=psfModels,
        model_psf=modelPsf[None, :, :],
        convolution_mode=convolutionType,
    )

    # Store the bands used to create the observation
    observation.bands = mExposure.filters
    return observation

300 

301 

def deblend_old_lite(mExposure, modelPsf, footprint, config, spectrumInit, wavelets=None):
    """Deblend a parent footprint using the old (scarlet main) lite API.

    Parameters
    ----------
    mExposure : `lsst.image.MultibandExposure`
        The multiband exposure containing the image,
        mask, and variance data.
    modelPsf : `numpy.ndarray`
        The 2D model of the PSF in the partially deconvolved space.
    footprint : `lsst.detection.Footprint`
        The footprint of the parent to deblend.
    config : `ScarletDeblendConfig`
        Configuration of the deblending task.
    spectrumInit : `bool`
        Whether or not to initialize the model using the spectrum.
    wavelets : `numpy.ndarray`, optional
        Pre-generated wavelets to use if using wavelet initialization.

    Returns
    -------
    blend : `scarlet.lite.LiteBlend`
        The blend this is to be deblended.
    skippedSources : `list`
        The source objects that were skipped due to no flux.
        This usually means that a source was a spurious detection in one
        band that should not have been included in the merged catalog.
    skippedBands : `list[str]`
        Bands that were skipped because a PSF could not be generated for them.
    """
    from scarlet import lite
    # Extract coordinates from each MultiColorPeak
    bbox = footprint.getBBox()
    psfCenter = footprint.getCentroid()

    observation = buildOldObservation(
        modelPsf=modelPsf,
        psfCenter=psfCenter,
        mExposure=mExposure[:, bbox],
        footprint=footprint,
        badPixelMasks=config.badMask,
        useWeights=config.useWeights,
        convolutionType=config.convolutionType,
    )

    # Convert the centers to (y, x) pixel coordinates local to the bbox
    xmin = bbox.getMinX()
    ymin = bbox.getMinY()
    centers = [
        np.array([peak.getIy() - ymin, peak.getIx() - xmin], dtype=int)
        for peak in footprint.peaks
        if not isPseudoSource(peak, config.pseudoColumns)
    ]

    # Initialize the sources
    if config.morphImage == "chi2":
        sources = lite.init_all_sources_main(
            observation,
            centers,
            min_snr=config.minSNR,
            thresh=config.morphThresh,
        )
    elif config.morphImage == "wavelet":
        # Slice the pre-generated wavelets to this footprint's bbox
        _bbox = bboxToScarletBox(len(mExposure.filters), bbox, bbox.getMin())
        _wavelets = wavelets[(slice(None), *_bbox[1:].slices)]
        sources = lite.init_all_sources_wavelets(
            observation,
            centers,
            use_psf=False,
            wavelets=_wavelets,
            min_snr=config.minSNR,
        )
    else:
        raise ValueError("morphImage must be either 'chi2' or 'wavelet'.")

    # Set the optimizer
    if config.optimizer == "adaprox":
        parameterization = partial(
            lite.init_adaprox_component,
            bg_thresh=config.backgroundThresh,
            max_prox_iter=config.maxProxIter,
        )
    elif config.optimizer == "fista":
        parameterization = partial(
            lite.init_fista_component,
            bg_thresh=config.backgroundThresh,
        )
    else:
        raise ValueError("Unrecognized optimizer. Must be either 'adaprox' or 'fista'.")
    sources = lite.parameterize_sources(sources, observation, parameterization)

    # Attach the peak to all of the initialized sources
    for k, center in enumerate(centers):
        # This is just to make sure that there isn't a coding bug
        if len(sources[k].components) > 0 and np.any(sources[k].center != center):
            # Bug fix: this message was missing the f-string prefix, so the
            # {center} placeholders were printed literally.
            raise ValueError(f"Misaligned center, expected {center} but got {sources[k].center}")
        # Store the record for the peak with the appropriate source
        sources[k].detectedPeak = footprint.peaks[k]

    blend = lite.LiteBlend(sources, observation)

    # Initialize each source with its best fit spectrum
    if spectrumInit:
        blend.fit_spectra()

    # Set the sources that could not be initialized and were skipped
    skippedSources = [src for src in sources if src.is_null]

    blend.fit(
        max_iter=config.maxIter,
        e_rel=config.relativeError,
        min_iter=config.minIter,
        reweight=False,
    )

    # Store the location of the PSF center for storage
    blend.psfCenter = (psfCenter.x, psfCenter.y)

    # Calculate the bands that were skipped
    skippedBands = [band for band in mExposure.filters if band not in observation.bands]

    return blend, skippedSources, skippedBands

418 

419 

class ScarletDeblendConfig(pexConfig.Config):
    """MultibandDeblendConfig

    Configuration for the multiband deblender.
    The parameters are organized by the parameter types, which are
    - Stopping Criteria: Used to determine if the fit has converged
    - Position Fitting Criteria: Used to fit the positions of the peaks
    - Constraints: Used to apply constraints to the peaks and their components
    - Other: Parameters that don't fit into the above categories
    """
    # Stopping Criteria
    minIter = pexConfig.Field(dtype=int, default=15,
                              doc="Minimum number of iterations before the optimizer is allowed to stop.")
    maxIter = pexConfig.Field(dtype=int, default=300,
                              doc="Maximum number of iterations to deblend a single parent")
    relativeError = pexConfig.Field(dtype=float, default=1e-2,
                                    doc="Change in the loss function between iterations to exit fitter. "
                                        "Typically this is `1e-2` if measurements will be made on the "
                                        "flux re-distributed models and `1e-4` when making measurements "
                                        "on the models themselves.")

    # Constraints
    morphThresh = pexConfig.Field(dtype=float, default=1,
                                  # Fixed missing space between concatenated fragments
                                  doc="Fraction of background RMS a pixel must have "
                                      "to be included in the initial morphology")
    # Lite Parameters
    # All of these parameters (except version) are only valid if version='lite'
    version = pexConfig.ChoiceField(
        dtype=str,
        default="lite",
        allowed={
            "old_lite": "scarlet lite from the scarlet main package",
            "lite": "LSST optimized version of scarlet for survey data from a single instrument",
        },
        doc="The version of scarlet to use.",
    )
    optimizer = pexConfig.ChoiceField(
        dtype=str,
        default="adaprox",
        allowed={
            "adaprox": "Proximal ADAM optimization",
            "fista": "Accelerated proximal gradient method",
        },
        doc="The optimizer to use for fitting parameters and is only used when version='lite'",
    )
    morphImage = pexConfig.ChoiceField(
        dtype=str,
        default="chi2",
        allowed={
            "chi2": "Initialize sources on a chi^2 image made from all available bands",
            "wavelet": "Initialize sources using a wavelet decomposition of the chi^2 image",
        },
        doc="The type of image to use for initializing the morphology. "
            "Must be either 'chi2' or 'wavelet'. "
    )
    backgroundThresh = pexConfig.Field(
        dtype=float,
        default=0.25,
        doc="Fraction of background to use for a sparsity threshold. "
            "This prevents sources from growing unrealistically outside "
            "the parent footprint while still modeling flux correctly "
            "for bright sources."
    )
    maxProxIter = pexConfig.Field(
        dtype=int,
        default=1,
        doc="Maximum number of proximal operator iterations inside of each "
            "iteration of the optimizer. "
            "This config field is only used if version='lite' and optimizer='adaprox'."
    )
    waveletScales = pexConfig.Field(
        dtype=int,
        default=5,
        doc="Number of wavelet scales to use for wavelet initialization. "
            "This field is only used when `version`='lite' and `morphImage`='wavelet'."
    )

    # Other scarlet parameters
    useWeights = pexConfig.Field(
        dtype=bool, default=True,
        # Fixed "use use" typo and missing sentence-separating space
        doc="Whether or not to use inverse variance weighting. "
            "If `useWeights` is `False` then flat weights are used")
    modelPsfSize = pexConfig.Field(
        dtype=int, default=11,
        doc="Model PSF side length in pixels")
    modelPsfSigma = pexConfig.Field(
        dtype=float, default=0.8,
        doc="Define sigma for the model frame PSF")
    minSNR = pexConfig.Field(
        dtype=float, default=50,
        doc="Minimum Signal to noise to accept the source. "
            "Sources with lower flux will be initialized with the PSF but updated "
            "like an ordinary ExtendedSource (known in scarlet as a `CompactSource`).")
    saveTemplates = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to save the SEDs and templates")
    processSingles = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to process isolated sources in the deblender")
    convolutionType = pexConfig.Field(
        dtype=str, default="fft",
        doc="Type of convolution to render the model to the observations.\n"
            "- 'fft': perform convolutions in Fourier space\n"
            "- 'real': perform convolutions in real space.")
    sourceModel = pexConfig.Field(
        dtype=str, default="double",
        doc=("How to determine which model to use for sources, from\n"
             "- 'single': use a single component for all sources\n"
             "- 'double': use a bulge disk model for all sources\n"
             "- 'compact': use a single component model, initialized with a point source morphology, "
             " for all sources\n"
             "- 'point': use a point-source model for all sources\n"
             "- 'fit: use a PSF fitting model to determine the number of components (not yet implemented)"),
        deprecated="This field will be deprecated when the default for `version` is changed to `lite`.",
    )
    setSpectra = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to solve for the best-fit spectra during initialization. "
            "This makes initialization slightly longer, as it requires a convolution "
            "to set the optimal spectra, but results in a much better initial log-likelihood "
            "and reduced total runtime, with convergence in fewer iterations. "
            "This option is only used when "
            "peaks*area < `maxSpectrumCutoff` will use the improved initialization.")

    # Mask-plane restrictions
    badMask = pexConfig.ListField(
        dtype=str, default=defaultBadPixelMasks,
        # Bug fix: the previous doc string was copy-pasted from
        # `processSingles` and described the wrong field entirely.
        doc="Mask planes used to reject bad pixels from the deblender fit")
    statsMask = pexConfig.ListField(dtype=str, default=["SAT", "INTRP", "NO_DATA"],
                                    doc="Mask planes to ignore when performing statistics")
    maskLimits = pexConfig.DictField(
        keytype=str,
        itemtype=float,
        default={},
        doc=("Mask planes with the corresponding limit on the fraction of masked pixels. "
             "Sources violating this limit will not be deblended. "
             "If the fraction is `0` then the limit is a single pixel."),
    )

    # Size restrictions
    maxNumberOfPeaks = pexConfig.Field(
        dtype=int, default=200,
        doc=("Only deblend the brightest maxNumberOfPeaks peaks in the parent"
             " (<= 0: unlimited)"))
    maxFootprintArea = pexConfig.Field(
        dtype=int, default=100_000,
        doc=("Maximum area for footprints before they are ignored as large; "
             "non-positive means no threshold applied"))
    maxAreaTimesPeaks = pexConfig.Field(
        dtype=int, default=10_000_000,
        doc=("Maximum rectangular footprint area * nPeaks in the footprint. "
             "This was introduced in DM-33690 to prevent fields that are crowded or have a "
             "LSB galaxy that causes memory intensive initialization in scarlet from dominating "
             "the overall runtime and/or causing the task to run out of memory. "
             "(<= 0: unlimited)")
    )
    maxFootprintSize = pexConfig.Field(
        dtype=int, default=0,
        doc=("Maximum linear dimension for footprints before they are ignored "
             "as large; non-positive means no threshold applied"))
    minFootprintAxisRatio = pexConfig.Field(
        dtype=float, default=0.0,
        doc=("Minimum axis ratio for footprints before they are ignored "
             "as large; non-positive means no threshold applied"))
    maxSpectrumCutoff = pexConfig.Field(
        dtype=int, default=1_000_000,
        doc=("Maximum number of pixels * number of sources in a blend. "
             "This is different than `maxFootprintArea` because this isn't "
             "the footprint area but the area of the bounding box that "
             "contains the footprint, and is also multiplied by the number of "
             "sources in the footprint. This prevents large skinny blends with "
             "a high density of sources from running out of memory. "
             "If `maxSpectrumCutoff == -1` then there is no cutoff.")
    )
    # Failure modes
    fallback = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to fallback to a smaller number of components if a source does not initialize"
    )
    notDeblendedMask = pexConfig.Field(
        dtype=str, default="NOT_DEBLENDED", optional=True,
        doc="Mask name for footprints not deblended, or None")
    catchFailures = pexConfig.Field(
        dtype=bool, default=True,
        doc=("If True, catch exceptions thrown by the deblender, log them, "
             "and set a flag on the parent, instead of letting them propagate up"))

    # Other options
    columnInheritance = pexConfig.DictField(
        keytype=str, itemtype=str, default={
            "deblend_nChild": "deblend_parentNChild",
            "deblend_nPeaks": "deblend_parentNPeaks",
            "deblend_spectrumInitFlag": "deblend_spectrumInitFlag",
            "deblend_blendConvergenceFailedFlag": "deblend_blendConvergenceFailedFlag",
        },
        doc="Columns to pass from the parent to the child. "
            "The key is the name of the column for the parent record, "
            "the value is the name of the column to use for the child."
    )
    pseudoColumns = pexConfig.ListField(
        dtype=str, default=['merge_peak_sky', 'sky_source'],
        doc="Names of flags which should never be deblended."
    )

    # Testing options
    # Some obs packages and ci packages run the full pipeline on a small
    # subset of data to test that the pipeline is functioning properly.
    # This is not meant as scientific validation, so it can be useful
    # to only run on a small subset of the data that is large enough to
    # test the desired pipeline features but not so long that the deblender
    # is the tall pole in terms of execution times.
    useCiLimits = pexConfig.Field(
        dtype=bool, default=False,
        doc="Limit the number of sources deblended for CI to prevent long build times")
    ciDeblendChildRange = pexConfig.ListField(
        dtype=int, default=[5, 10],
        doc="Only deblend parent Footprints with a number of peaks in the (inclusive) range indicated. "
            "If `useCiLimits==False` then this parameter is ignored.")
    ciNumParentsToDeblend = pexConfig.Field(
        dtype=int, default=10,
        # Fixed "ciDebledChildRange" typo so the cross-reference resolves
        doc="Only use the first `ciNumParentsToDeblend` parent footprints with a total peak count "
            "within `ciDeblendChildRange`. "
            "If `useCiLimits==False` then this parameter is ignored.")

644 

645class ScarletDeblendTask(pipeBase.Task): 

646 """ScarletDeblendTask 

647 

648 Split blended sources into individual sources. 

649 

650 This task has no return value; it only modifies the SourceCatalog in-place. 

651 """ 

652 ConfigClass = ScarletDeblendConfig 

653 _DefaultName = "scarletDeblend" 

654 

    def __init__(self, schema, peakSchema=None, **kwargs):
        """Create the task, adding necessary fields to the given schema.

        Parameters
        ----------
        schema : `lsst.afw.table.schema.schema.Schema`
            Schema object for measurement fields; will be modified in-place.
        peakSchema : `lsst.afw.table.schema.schema.Schema`
            Schema of Footprint Peaks that will be passed to the deblender.
            Any fields beyond the PeakTable minimal schema will be transferred
            to the main source Schema. If None, no fields will be transferred
            from the Peaks.
        **kwargs
            Passed to Task.__init__.
        """
        pipeBase.Task.__init__(self, **kwargs)

        peakMinimalSchema = afwDet.PeakTable.makeMinimalSchema()
        if peakSchema is None:
            # In this case, the peakSchemaMapper will transfer nothing, but
            # we'll still have one
            # to simplify downstream code
            self.peakSchemaMapper = afwTable.SchemaMapper(peakMinimalSchema, schema)
        else:
            self.peakSchemaMapper = afwTable.SchemaMapper(peakSchema, schema)
            for item in peakSchema:
                if item.key not in peakMinimalSchema:
                    self.peakSchemaMapper.addMapping(item.key, item.field)
                    # Because SchemaMapper makes a copy of the output schema
                    # you give its ctor, it isn't updating this Schema in
                    # place. That's probably a design flaw, but in the
                    # meantime, we'll keep that schema in sync with the
                    # peakSchemaMapper.getOutputSchema() manually, by adding
                    # the same fields to both.
                    schema.addField(item.field)
            assert schema == self.peakSchemaMapper.getOutputSchema(), "Logic bug mapping schemas"
        self._addSchemaKeys(schema)
        self.schema = schema
        # Keys for fields copied verbatim from parent to child records.
        self.toCopyFromParent = [item.key for item in self.schema
                                 if item.field.getName().startswith("merge_footprint")]

698 

699 def _addSchemaKeys(self, schema): 

700 """Add deblender specific keys to the schema 

701 """ 

702 # Parent (blend) fields 

703 self.runtimeKey = schema.addField('deblend_runtime', type=np.float32, doc='runtime in ms') 

704 self.iterKey = schema.addField('deblend_iterations', type=np.int32, doc='iterations to converge') 

705 self.nChildKey = schema.addField('deblend_nChild', type=np.int32, 

706 doc='Number of children this object has (defaults to 0)') 

707 self.nPeaksKey = schema.addField("deblend_nPeaks", type=np.int32, 

708 doc="Number of initial peaks in the blend. " 

709 "This includes peaks that may have been culled " 

710 "during deblending or failed to deblend") 

711 # Skipped flags 

712 self.deblendSkippedKey = schema.addField('deblend_skipped', type='Flag', 

713 doc="Deblender skipped this source") 

714 self.isolatedParentKey = schema.addField('deblend_isolatedParent', type='Flag', 

715 doc='The source has only a single peak ' 

716 'and was not deblended') 

717 self.pseudoKey = schema.addField('deblend_isPseudo', type='Flag', 

718 doc='The source is identified as a "pseudo" source and ' 

719 'was not deblended') 

720 self.tooManyPeaksKey = schema.addField('deblend_tooManyPeaks', type='Flag', 

721 doc='Source had too many peaks; ' 

722 'only the brightest were included') 

723 self.tooBigKey = schema.addField('deblend_parentTooBig', type='Flag', 

724 doc='Parent footprint covered too many pixels') 

725 self.maskedKey = schema.addField('deblend_masked', type='Flag', 

726 doc='Parent footprint had too many masked pixels') 

727 # Convergence flags 

728 self.sedNotConvergedKey = schema.addField('deblend_sedConvergenceFailed', type='Flag', 

729 doc='scarlet sed optimization did not converge before' 

730 'config.maxIter') 

731 self.morphNotConvergedKey = schema.addField('deblend_morphConvergenceFailed', type='Flag', 

732 doc='scarlet morph optimization did not converge before' 

733 'config.maxIter') 

734 self.blendConvergenceFailedFlagKey = schema.addField('deblend_blendConvergenceFailedFlag', 

735 type='Flag', 

736 doc='at least one source in the blend' 

737 'failed to converge') 

738 # Error flags 

739 self.deblendFailedKey = schema.addField('deblend_failed', type='Flag', 

740 doc="Deblending failed on source") 

741 self.deblendErrorKey = schema.addField('deblend_error', type="String", size=25, 

742 doc='Name of error if the blend failed') 

743 self.incompleteDataKey = schema.addField('deblend_incompleteData', type='Flag', 

744 doc='True when a blend has at least one band ' 

745 'that could not generate a PSF and was ' 

746 'not included in the model.') 

747 # Deblended source fields 

748 self.peakCenter = afwTable.Point2IKey.addFields(schema, name="deblend_peak_center", 

749 doc="Center used to apply constraints in scarlet", 

750 unit="pixel") 

751 self.peakIdKey = schema.addField("deblend_peakId", type=np.int32, 

752 doc="ID of the peak in the parent footprint. " 

753 "This is not unique, but the combination of 'parent'" 

754 "and 'peakId' should be for all child sources. " 

755 "Top level blends with no parents have 'peakId=0'") 

756 self.modelCenterFlux = schema.addField('deblend_peak_instFlux', type=float, units='count', 

757 doc="The instFlux at the peak position of deblended mode") 

758 self.modelTypeKey = schema.addField("deblend_modelType", type="String", size=25, 

759 doc="The type of model used, for example " 

760 "MultiExtendedSource, SingleExtendedSource, PointSource") 

761 self.parentNPeaksKey = schema.addField("deblend_parentNPeaks", type=np.int32, 

762 doc="deblend_nPeaks from this records parent.") 

763 self.parentNChildKey = schema.addField("deblend_parentNChild", type=np.int32, 

764 doc="deblend_nChild from this records parent.") 

765 self.scarletFluxKey = schema.addField("deblend_scarletFlux", type=np.float32, 

766 doc="Flux measurement from scarlet") 

767 self.scarletLogLKey = schema.addField("deblend_logL", type=np.float32, 

768 doc="Final logL, used to identify regressions in scarlet.") 

769 self.edgePixelsKey = schema.addField('deblend_edgePixels', type='Flag', 

770 doc='Source had flux on the edge of the parent footprint') 

771 self.scarletSpectrumInitKey = schema.addField("deblend_spectrumInitFlag", type='Flag', 

772 doc="True when scarlet initializes sources " 

773 "in the blend with a more accurate spectrum. " 

774 "The algorithm uses a lot of memory, " 

775 "so large dense blends will use " 

776 "a less accurate initialization.") 

777 self.nComponentsKey = schema.addField("deblend_nComponents", type=np.int32, 

778 doc="Number of components in a ScarletLiteSource. " 

779 "If `config.version != 'lite'`then " 

780 "this column is set to zero.") 

781 self.psfKey = schema.addField('deblend_deblendedAsPsf', type='Flag', 

782 doc='Deblender thought this source looked like a PSF') 

783 self.coverageKey = schema.addField('deblend_dataCoverage', type=np.float32, 

784 doc='Fraction of pixels with data. ' 

785 'In other words, 1 - fraction of pixels with NO_DATA set.') 

786 # Blendedness/classification metrics 

787 self.maxOverlapKey = schema.addField("deblend_maxOverlap", type=np.float32, 

788 doc="Maximum overlap with all of the other neighbors flux " 

789 "combined." 

790 "This is useful as a metric for determining how blended a " 

791 "source is because if it only overlaps with other sources " 

792 "at or below the noise level, it is likely to be a mostly " 

793 "isolated source in the deconvolved model frame.") 

794 self.fluxOverlapKey = schema.addField("deblend_fluxOverlap", type=np.float32, 

795 doc="This is the total flux from neighboring objects that " 

796 "overlaps with this source.") 

797 self.fluxOverlapFractionKey = schema.addField("deblend_fluxOverlapFraction", type=np.float32, 

798 doc="This is the fraction of " 

799 "`flux from neighbors/source flux` " 

800 "for a given source within the source's" 

801 "footprint.") 

802 self.blendednessKey = schema.addField("deblend_blendedness", type=np.float32, 

803 doc="The Bosch et al. 2018 metric for 'blendedness.' ") 

804 

@timeMethod
def run(self, mExposure, mergedSources):
    """Deblend the sources in the merged catalog.

    This is a thin wrapper that forwards directly to `deblend`.

    Parameters
    ----------
    mExposure : `MultibandExposure`
        The exposures should be co-added images of the same
        shape and region of the sky.
    mergedSources : `SourceCatalog`
        The merged `SourceCatalog` that contains parent footprints
        to (potentially) deblend.

    Returns
    -------
    catalog : `SourceCatalog`
        The input catalog with the deblended child sources appended
        in place (the same object returned by `deblend`).
    dataModel : `lsst.scarlet.lite.io.ScarletModelData`
        The persistable scarlet model data produced by `deblend`.
    """
    return self.deblend(mExposure, mergedSources)

827 

@timeMethod
def deblend(self, mExposure, catalog):
    """Deblend a data cube of multiband images.

    Parameters
    ----------
    mExposure : `MultibandExposure`
        The exposures should be co-added images of the same
        shape and region of the sky.
    catalog : `SourceCatalog`
        The merged `SourceCatalog` that contains parent footprints
        to (potentially) deblend. The new deblended sources are
        appended to this catalog in place.

    Returns
    -------
    catalog : `SourceCatalog`
        The input catalog, updated in place, with the deblended
        child sources appended.
    dataModel : `lsst.scarlet.lite.io.ScarletModelData`
        The persistable data model for all of the deblended blends.
    """
    import time

    # Cull footprints if required by ci
    if self.config.useCiLimits:
        self.log.info("Using CI catalog limits, the original number of sources to deblend was %d.",
                      len(catalog))
        # Select parents with a number of children in the range
        # config.ciDeblendChildRange
        minChildren, maxChildren = self.config.ciDeblendChildRange
        nPeaks = np.array([len(src.getFootprint().peaks) for src in catalog])
        childrenInRange = np.where((nPeaks >= minChildren) & (nPeaks <= maxChildren))[0]
        if len(childrenInRange) < self.config.ciNumParentsToDeblend:
            raise ValueError("Fewer than ciNumParentsToDeblend children were contained in the range "
                             "indicated by ciDeblendChildRange. Adjust this range to include more "
                             "parents.")
        # Keep all of the isolated parents and the first
        # `ciNumParentsToDeblend` children
        parents = nPeaks == 1
        children = np.zeros((len(catalog),), dtype=bool)
        children[childrenInRange[:self.config.ciNumParentsToDeblend]] = True
        catalog = catalog[parents | children]
        # We need to update the IdFactory, otherwise the source ids
        # will not be sequential
        idFactory = catalog.getIdFactory()
        maxId = np.max(catalog["id"])
        idFactory.notify(maxId)

    self.log.info("Deblending %d sources in %d exposure bands", len(catalog), len(mExposure))
    periodicLog = PeriodicLogger(self.log)

    # Create a set of wavelet coefficients if using wavelet initialization
    if self.config.morphImage == "wavelet":
        images = mExposure.image.array
        variance = mExposure.variance.array
        wavelets = scl.detect.get_detect_wavelets(images, variance, scales=self.config.waveletScales)
    else:
        wavelets = None

    # Add the NOT_DEBLENDED mask to the mask plane in each band
    if self.config.notDeblendedMask:
        for mask in mExposure.mask:
            mask.addMaskPlane(self.config.notDeblendedMask)

    # Initialize the persistable data model
    modelPsf = scl.utils.integrated_circular_gaussian(sigma=self.config.modelPsfSigma)
    dataModel = scl.io.ScarletModelData(modelPsf)

    # Initialize the monotonicity operator with a size of 101 x 101 pixels.
    # Note: If a component is > 101x101 in either axis then the
    # monotonicity operator will resize itself.
    monotonicity = scl.operators.Monotonicity((101, 101))

    nParents = len(catalog)
    nDeblendedParents = 0
    skippedParents = []
    for parentIndex in range(nParents):
        parent = catalog[parentIndex]
        foot = parent.getFootprint()
        bbox = foot.getBBox()
        peaks = foot.getPeaks()

        # Since we use the first peak for the parent object, we should
        # propagate its flags to the parent source.
        parent.assign(peaks[0], self.peakSchemaMapper)

        # Block of conditions for skipping a parent with multiple children
        if (skipArgs := self._checkSkipped(parent, mExposure)) is not None:
            self._skipParent(parent, *skipArgs)
            skippedParents.append(parentIndex)
            continue

        nDeblendedParents += 1
        self.log.trace("Parent %d: deblending %d peaks", parent.getId(), len(peaks))
        # Run the deblender
        blendError = None

        # Choose whether or not to use improved spectral initialization.
        # This significantly cuts down on the number of iterations
        # that the optimizer needs and usually results in a better
        # fit.
        # But using least squares on a very large blend causes memory
        # issues, so it is not done for large blends
        if self.config.setSpectra:
            if self.config.maxSpectrumCutoff <= 0:
                spectrumInit = True
            else:
                spectrumInit = len(foot.peaks) * bbox.getArea() < self.config.maxSpectrumCutoff
        else:
            spectrumInit = False

        try:
            t0 = time.monotonic()
            # Build the parameter lists with the same ordering
            if self.config.version == "lite":
                blend, skippedSources, skippedBands = deblend(
                    mExposure=mExposure,
                    modelPsf=modelPsf,
                    footprint=foot,
                    config=self.config,
                    spectrumInit=spectrumInit,
                    wavelets=wavelets,
                    monotonicity=monotonicity,
                )
            elif self.config.version == "old_lite":
                blend, skippedSources, skippedBands = deblend_old_lite(
                    mExposure=mExposure,
                    modelPsf=modelPsf,
                    footprint=foot,
                    config=self.config,
                    spectrumInit=spectrumInit,
                    wavelets=wavelets,
                )
            tf = time.monotonic()
            runtime = (tf-t0)*1000
            converged = _checkBlendConvergence(blend, self.config.relativeError)
            # Store the number of components in the blend
            nComponents = len(blend.components)
            nChild = len(blend.sources)
            parent.set(self.incompleteDataKey, len(skippedBands) > 0)
        # Catch all errors and filter out the ones that we know about
        except Exception as e:
            blendError = type(e).__name__
            if isinstance(e, ScarletGradientError):
                parent.set(self.iterKey, e.iterations)
            else:
                blendError = "UnknownError"
                if self.config.catchFailures:
                    # Make it easy to find UnknownErrors in the log file.
                    # Use `warning` rather than the deprecated `warn` alias.
                    self.log.warning("UnknownError")
                    import traceback
                    traceback.print_exc()
                else:
                    raise

            # Note: `parent.getId()` must be *called* here; interpolating
            # the bound method itself would log a useless repr.
            self._skipParent(
                parent=parent,
                skipKey=self.deblendFailedKey,
                logMessage=f"Unable to deblend source {parent.getId()}: {blendError}",
            )
            parent.set(self.deblendErrorKey, blendError)
            skippedParents.append(parentIndex)
            continue

        # Update the parent record with the deblending results
        self._updateParentRecord(
            parent=parent,
            nPeaks=len(peaks),
            nChild=nChild,
            nComponents=nComponents,
            runtime=runtime,
            iterations=len(blend.loss),
            logL=blend.loss[-1],
            spectrumInit=spectrumInit,
            converged=converged,
        )

        # Add each deblended source to the catalog
        for k, scarletSource in enumerate(blend.sources):
            # Skip any sources with no flux or that scarlet skipped because
            # it could not initialize
            if k in skippedSources or (self.config.version == "lite" and scarletSource.is_null):
                # No need to propagate anything
                continue
            parent.set(self.deblendSkippedKey, False)

            # Add all fields except the HeavyFootprint to the
            # source record
            sourceRecord = self._addChild(
                parent=parent,
                peak=scarletSource.detectedPeak,
                catalog=catalog,
                scarletSource=scarletSource,
            )
            scarletSource.record_id = sourceRecord.getId()
            scarletSource.peak_id = scarletSource.detectedPeak.getId()

        # Store the blend information so that it can be persisted
        if self.config.version == "lite":
            blendData = scl.io.ScarletBlendData.from_blend(blend, blend.psfCenter)
        else:
            blendData = io.oldScarletToData(blend, blend.psfCenter, bbox.getMin())
        dataModel.blends[parent.getId()] = blendData

        # Log a message if it has been a while since the last log.
        periodicLog.log("Deblended %d parent sources out of %d", parentIndex + 1, nParents)

    # Clear the cached values in scarlet to clear out memory
    if self.config.version == "old_lite":
        import scarlet
        scarlet.cache.Cache._cache = {}

    # Update the mExposure mask with the footprint of skipped parents
    if self.config.notDeblendedMask:
        for mask in mExposure.mask:
            for parentIndex in skippedParents:
                fp = catalog[parentIndex].getFootprint()
                fp.spans.setMask(mask, mask.getPlaneBitMask(self.config.notDeblendedMask))

    self.log.info("Deblender results: of %d parent sources, %d were deblended, "
                  "creating %d children, for a total of %d sources",
                  nParents, nDeblendedParents, len(catalog)-nParents, len(catalog))
    return catalog, dataModel

1052 

1053 def _isLargeFootprint(self, footprint): 

1054 """Returns whether a Footprint is large 

1055 

1056 'Large' is defined by thresholds on the area, size and axis ratio, 

1057 and total area of the bounding box multiplied by 

1058 the number of children. 

1059 These may be disabled independently by configuring them to be 

1060 non-positive. 

1061 """ 

1062 if self.config.maxFootprintArea > 0 and footprint.getArea() > self.config.maxFootprintArea: 

1063 return True 

1064 if self.config.maxFootprintSize > 0: 

1065 bbox = footprint.getBBox() 

1066 if max(bbox.getWidth(), bbox.getHeight()) > self.config.maxFootprintSize: 

1067 return True 

1068 if self.config.minFootprintAxisRatio > 0: 

1069 axes = afwEll.Axes(footprint.getShape()) 

1070 if axes.getB() < self.config.minFootprintAxisRatio*axes.getA(): 

1071 return True 

1072 if self.config.maxAreaTimesPeaks > 0: 

1073 if footprint.getBBox().getArea() * len(footprint.peaks) > self.config.maxAreaTimesPeaks: 

1074 return True 

1075 return False 

1076 

def _isMasked(self, footprint, mExposure):
    """Return whether the footprint violates the configured mask limits.

    Parameters
    ----------
    footprint : `lsst.afw.detection.Footprint`
        The footprint to check for masked pixels.
    mExposure : `MultibandExposure`
        The multiband exposure whose mask planes are checked within
        the `footprint`.

    Returns
    -------
    isMasked : `bool`
        `True` when, for any mask plane in `self.config.maskLimits`,
        the fraction of footprint pixels with that plane set exceeds
        the configured limit.
    """
    bbox = footprint.getBBox()
    # Collapse the per-band mask planes into a single 2D bitmask.
    combined = np.bitwise_or.reduce(mExposure.mask[:, bbox].array, axis=0)
    footprintArea = float(footprint.getArea())
    for planeName, fractionLimit in self.config.maskLimits.items():
        bitmask = mExposure.mask.getPlaneBitMask(planeName)
        maskedPlane = afwImage.MaskX(combined & bitmask, xy0=bbox.getMin())
        # Span set of footprint pixels that have this plane set.
        maskedSpans = footprint.spans.intersect(maskedPlane, bitmask)
        if maskedSpans.getArea() / footprintArea > fractionLimit:
            return True
    return False

1105 

1106 def _skipParent(self, parent, skipKey, logMessage): 

1107 """Update a parent record that is not being deblended. 

1108 

1109 This is a fairly trivial function but is implemented to ensure 

1110 that a skipped parent updates the appropriate columns 

1111 consistently, and always has a flag to mark the reason that 

1112 it is being skipped. 

1113 

1114 Parameters 

1115 ---------- 

1116 parent : `lsst.afw.table.source.source.SourceRecord` 

1117 The parent record to flag as skipped. 

1118 skipKey : `bool` 

1119 The name of the flag to mark the reason for skipping. 

1120 logMessage : `str` 

1121 The message to display in a log.trace when a source 

1122 is skipped. 

1123 """ 

1124 if logMessage is not None: 

1125 self.log.trace(logMessage) 

1126 self._updateParentRecord( 

1127 parent=parent, 

1128 nPeaks=len(parent.getFootprint().peaks), 

1129 nChild=0, 

1130 nComponents=0, 

1131 runtime=np.nan, 

1132 iterations=0, 

1133 logL=np.nan, 

1134 spectrumInit=False, 

1135 converged=False, 

1136 ) 

1137 

1138 # Mark the source as skipped by the deblender and 

1139 # flag the reason why. 

1140 parent.set(self.deblendSkippedKey, True) 

1141 parent.set(skipKey, True) 

1142 

1143 def _checkSkipped(self, parent, mExposure): 

1144 """Update a parent record that is not being deblended. 

1145 

1146 This is a fairly trivial function but is implemented to ensure 

1147 that a skipped parent updates the appropriate columns 

1148 consistently, and always has a flag to mark the reason that 

1149 it is being skipped. 

1150 

1151 Parameters 

1152 ---------- 

1153 parent : `lsst.afw.table.source.source.SourceRecord` 

1154 The parent record to flag as skipped. 

1155 mExposure : `MultibandExposure` 

1156 The exposures should be co-added images of the same 

1157 shape and region of the sky. 

1158 Returns 

1159 ------- 

1160 skip: `bool` 

1161 `True` if the deblender will skip the parent 

1162 """ 

1163 skipKey = None 

1164 skipMessage = None 

1165 footprint = parent.getFootprint() 

1166 if len(footprint.peaks) < 2 and not self.config.processSingles: 

1167 # Skip isolated sources unless processSingles is turned on. 

1168 # Note: this does not flag isolated sources as skipped or 

1169 # set the NOT_DEBLENDED mask in the exposure, 

1170 # since these aren't really any skipped blends. 

1171 skipKey = self.isolatedParentKey 

1172 elif isPseudoSource(parent, self.config.pseudoColumns): 

1173 # We also skip pseudo sources, like sky objects, which 

1174 # are intended to be skipped. 

1175 skipKey = self.pseudoKey 

1176 if self._isLargeFootprint(footprint): 

1177 # The footprint is above the maximum footprint size limit 

1178 skipKey = self.tooBigKey 

1179 skipMessage = f"Parent {parent.getId()}: skipping large footprint" 

1180 elif self._isMasked(footprint, mExposure): 

1181 # The footprint exceeds the maximum number of masked pixels 

1182 skipKey = self.maskedKey 

1183 skipMessage = f"Parent {parent.getId()}: skipping masked footprint" 

1184 elif self.config.maxNumberOfPeaks > 0 and len(footprint.peaks) > self.config.maxNumberOfPeaks: 

1185 # Unlike meas_deblender, in scarlet we skip the entire blend 

1186 # if the number of peaks exceeds max peaks, since neglecting 

1187 # to model any peaks often results in catastrophic failure 

1188 # of scarlet to generate models for the brighter sources. 

1189 skipKey = self.tooManyPeaksKey 

1190 skipMessage = f"Parent {parent.getId()}: skipping blend with too many peaks" 

1191 if skipKey is not None: 

1192 return (skipKey, skipMessage) 

1193 return None 

1194 

def setSkipFlags(self, mExposure, catalog):
    """Set the skip flags for all of the parent sources.

    This is mostly used for testing which parent sources will be
    deblended and which will be skipped based on the current
    configuration options. Skipped sources will have the appropriate
    flags set in place in the catalog.

    Parameters
    ----------
    mExposure : `MultibandExposure`
        The exposures should be co-added images of the same
        shape and region of the sky.
    catalog : `SourceCatalog`
        The merged `SourceCatalog` that contains parent footprints
        to (potentially) deblend. Flags are set in place.
    """
    for src in catalog:
        # The walrus must be parenthesized: without the parentheses the
        # assignment captured the boolean result of `... is not None`
        # instead of the (skipKey, logMessage) tuple, so the unpacking
        # call below raised TypeError for every skipped source.
        if (skipArgs := self._checkSkipped(src, mExposure)) is not None:
            self._skipParent(src, *skipArgs)

1216 

1217 def _updateParentRecord(self, parent, nPeaks, nChild, nComponents, 

1218 runtime, iterations, logL, spectrumInit, converged): 

1219 """Update a parent record in all of the single band catalogs. 

1220 

1221 Ensure that all locations that update a parent record, 

1222 whether it is skipped or updated after deblending, 

1223 update all of the appropriate columns. 

1224 

1225 Parameters 

1226 ---------- 

1227 parent : `lsst.afw.table.source.source.SourceRecord` 

1228 The parent record to update. 

1229 nPeaks : `int` 

1230 Number of peaks in the parent footprint. 

1231 nChild : `int` 

1232 Number of children deblended from the parent. 

1233 This may differ from `nPeaks` if some of the peaks 

1234 were culled and have no deblended model. 

1235 nComponents : `int` 

1236 Total number of components in the parent. 

1237 This is usually different than the number of children, 

1238 since it is common for a single source to have multiple 

1239 components. 

1240 runtime : `float` 

1241 Total runtime for deblending. 

1242 iterations : `int` 

1243 Total number of iterations in scarlet before convergence. 

1244 logL : `float` 

1245 Final log likelihood of the blend. 

1246 spectrumInit : `bool` 

1247 True when scarlet used `set_spectra` to initialize all 

1248 sources with better initial intensities. 

1249 converged : `bool` 

1250 True when the optimizer reached convergence before 

1251 reaching the maximum number of iterations. 

1252 """ 

1253 parent.set(self.nPeaksKey, nPeaks) 

1254 parent.set(self.nChildKey, nChild) 

1255 parent.set(self.nComponentsKey, nComponents) 

1256 parent.set(self.runtimeKey, runtime) 

1257 parent.set(self.iterKey, iterations) 

1258 parent.set(self.scarletLogLKey, logL) 

1259 parent.set(self.scarletSpectrumInitKey, spectrumInit) 

1260 parent.set(self.blendConvergenceFailedFlagKey, converged) 

1261 

def _addChild(self, parent, peak, catalog, scarletSource):
    """Append a deblended child record to the catalog.

    Creates the new child record, links it to its parent, and fills
    every column that is independent of the filter band.

    Parameters
    ----------
    parent : `lsst.afw.table.source.source.SourceRecord`
        The parent of the new child record.
    peak : `lsst.afw.table.PeakRecord`
        The peak record for the peak from the parent peak catalog.
    catalog : `lsst.afw.table.source.source.SourceCatalog`
        The merged `SourceCatalog` that contains parent footprints
        to (potentially) deblend.
    scarletSource : `scarlet.Component`
        The scarlet model for the new source record.

    Returns
    -------
    child : `lsst.afw.table.source.source.SourceRecord`
        The newly created child record.
    """
    child = catalog.addNew()

    # Carry over the columns children share with their parent.
    for key in self.toCopyFromParent:
        child.set(key, parent.get(key))

    # The peak catalog is identical in every band, so using the first
    # one is safe.
    child.assign(peak, self.peakSchemaMapper)
    child.setParent(parent.getId())
    child.set(self.nPeaksKey, 1)

    # `deblend_deblendedAsPsf` is largely superseded by `modelType`,
    # but is still propagated for downstream code that expects it.
    modelName = scarletSource.__class__.__name__
    child.set(self.psfKey, modelName == "PointSource")
    child.set(self.modelTypeKey, modelName)

    # Children get zero runtime so that summing the runtime column
    # over the whole catalog yields the total deblender time.
    child.set(self.runtimeKey, 0)

    # Record the peak position from the parent footprint: it is stable
    # across deblenders and observations (barring proper motion), which
    # makes cross-matching the same source much easier.
    child.set(self.peakCenter, geom.Point2I(peak["i_x"], peak["i_y"]))
    child.set(self.peakIdKey, peak["id"])

    # Number of scarlet components used to model this source.
    child.set(self.nComponentsKey, len(scarletSource.components))

    # Flag sources missing one or more bands.
    child.set(self.incompleteDataKey, parent.get(self.incompleteDataKey))

    # Propagate the configured parent columns to the child.
    for parentColumn, childColumn in self.config.columnInheritance.items():
        child.set(childColumn, parent.get(parentColumn))

    return child